diff --git a/doc/source/_static/dpf_operators.html b/doc/source/_static/dpf_operators.html
index 4ee0c61151c..cd8dad8af42 100644
--- a/doc/source/_static/dpf_operators.html
+++ b/doc/source/_static/dpf_operators.html
@@ -2070,7 +2070,7 @@
[Hunk body omitted: the change to doc/source/_static/dpf_operators.html was extracted as rendered page text rather than diff lines — hundreds of operator catalog entries (operator names with their Inputs / Outputs / Configurations / Scripting sub-entries and tooltip descriptions), largely duplicated. The hunk regenerates the embedded operator documentation text, consistent with the regenerated operator docstrings in the Python files below.]
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_difference.py b/src/ansys/dpf/core/operators/averaging/elemental_difference.py
--- a/src/ansys/dpf/core/operators/averaging/elemental_difference.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_difference.py
+    def _spec() -> Specification:
+ description = r"""Transforms an Elemental Nodal or Nodal field into an Elemental field.
+Each elemental value is the maximum difference between the computed
+result for all nodes in this element. The result is computed on a given
+element scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -97,28 +100,25 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these entities""",
+ document=r"""average only on these entities""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="through_layers",
type_names=["bool"],
optional=True,
- document="""The maximum elemental difference is taken
- through the different shell layers if
- true (default is false).""",
+ document=r"""The maximum elemental difference is taken through the different shell layers if true (default is false).""",
),
},
map_output_pin_spec={
@@ -126,14 +126,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -142,29 +142,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_difference", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalDifference:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalDifference
+ inputs:
+ An instance of InputsElementalDifference.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalDifference:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalDifference
+ outputs:
+ An instance of OutputsElementalDifference.
"""
return super().outputs
@@ -201,15 +208,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._through_layers)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,14 +229,15 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Average only on these entities
+ average only on these entities
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -242,12 +250,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,16 +269,15 @@ def mesh(self):
return self._mesh
@property
- def through_layers(self):
- """Allows to connect through_layers input to the operator.
+ def through_layers(self) -> Input:
+ r"""Allows to connect through_layers input to the operator.
- The maximum elemental difference is taken
- through the different shell layers if
- true (default is false).
+ The maximum elemental difference is taken through the different shell layers if true (default is false).
- Parameters
- ----------
- my_through_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,18 +308,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_difference()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
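[Editor's note: a minimal usage sketch of the elemental_difference operator whose docstrings are regenerated above, in the same doctest style as the file's Examples sections. The pin names and connect/outputs calls come from this diff; my_field and my_scoping are hypothetical placeholders for a Field (or one-field FieldsContainer) and a Scoping supplied by the caller.]

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_difference()
>>> op.inputs.field.connect(my_field)            # Field, or a FieldsContainer holding a single field
>>> op.inputs.mesh_scoping.connect(my_scoping)   # optional: average only on these entities
>>> op.inputs.through_layers.connect(True)       # optional: take the difference through shell layers
>>> result_field = op.outputs.field()            # Elemental field of per-element maximum differences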
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py
index 8a53aecf07c..826e33299d1 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_difference_fc.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_difference_fc(Operator):
- """Transforms an Elemental Nodal or Nodal field into an Elemental field.
- Each elemental value is the maximum difference between the
- unaveraged or averaged (depending on the input fields) computed
- result for all nodes in this element. The result is computed on a
- given element scoping. If the input fields are mixed shell/solid,
- and the shell's layers are not specified as collapsed, then the
- fields are split by element shape and the output fields container
- has an elshape label.
+ r"""Transforms an Elemental Nodal or Nodal field into an Elemental field.
+ Each elemental value is the maximum difference between the unaveraged or
+ averaged (depending on the input fields) computed result for all nodes
+ in this element. The result is computed on a given element scoping. If
+ the input fields are mixed shell/solid, and the shell’s layers are not
+ specified as collapsed, then the fields are split by element shape and
+ the output fields container has an elshape label.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used to
- perform the averaging, used if there
- is no fields support.
- scoping : Scoping or ScopingsContainer, optional
- Average only on these elements. if it is
- scoping container, the label must
- correspond to the one of the fields
- container.
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used to perform the averaging, used if there is no fields support.
+ scoping: Scoping or ScopingsContainer, optional
+ Average only on these elements. If it is scoping container, the label must correspond to the one of the fields container.
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -93,16 +90,15 @@ def __init__(
self.inputs.collapse_shell_layers.connect(collapse_shell_layers)
@staticmethod
- def _spec():
- description = """Transforms an Elemental Nodal or Nodal field into an Elemental field.
- Each elemental value is the maximum difference between the
- unaveraged or averaged (depending on the input fields)
- computed result for all nodes in this element. The result
- is computed on a given element scoping. If the input
- fields are mixed shell/solid, and the shell's layers are
- not specified as collapsed, then the fields are split by
- element shape and the output fields container has an
- elshape label."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental Nodal or Nodal field into an Elemental field.
+Each elemental value is the maximum difference between the unaveraged or
+averaged (depending on the input fields) computed result for all nodes
+in this element. The result is computed on a given element scoping. If
+the input fields are mixed shell/solid, and the shell’s layers are not
+specified as collapsed, then the fields are split by element shape and
+the output fields container has an elshape label.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,32 +106,25 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging, used if there
- is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging, used if there is no fields support.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Average only on these elements. if it is
- scoping container, the label must
- correspond to the one of the fields
- container.""",
+ document=r"""Average only on these elements. If it is scoping container, the label must correspond to the one of the fields container.""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
},
map_output_pin_spec={
@@ -143,14 +132,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -159,29 +148,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_difference_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalDifferenceFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalDifferenceFc
+ inputs:
+ An instance of InputsElementalDifferenceFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalDifferenceFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalDifferenceFc
+ outputs:
+ An instance of OutputsElementalDifferenceFc.
"""
return super().outputs
@@ -220,12 +216,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._collapse_shell_layers)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,16 +235,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging, used if there
- is no fields support.
+ The mesh region in this pin is used to perform the averaging, used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,17 +256,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these elements. if it is
- scoping container, the label must
- correspond to the one of the fields
- container.
+ Average only on these elements. If it is scoping container, the label must correspond to the one of the fields container.
- Parameters
- ----------
- my_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,16 +277,15 @@ def scoping(self):
return self._scoping
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -325,18 +318,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_difference_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
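[Editor's note: the corresponding sketch for elemental_difference_fc, again grounded in the pin names shown in this diff; my_fields_container is a hypothetical FieldsContainer of ElementalNodal or Nodal fields provided by the caller.]

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_difference_fc()
>>> op.inputs.fields_container.connect(my_fields_container)  # ElementalNodal or Nodal fields
>>> op.inputs.collapse_shell_layers.connect(True)            # optional: also average across shell layers
>>> result_fc = op.outputs.fields_container()                # Elemental fields; split by elshape for mixed shell/solid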
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py
index e43a0aa03a0..9c536869422 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_fraction_fc.py
@@ -4,44 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_fraction_fc(Operator):
- """Transforms Elemental Nodal fields into Elemental fields. Each
- elemental value is the fraction between the elemental difference
- and the entity average. The result is computed on a given
- element's scoping.
+ r"""Transforms Elemental Nodal fields into Elemental fields. Each elemental
+ value is the fraction between the elemental difference and the entity
+ average. The result is computed on a given element’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- scoping : Scoping, optional
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
- denominator : FieldsContainer, optional
- If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- entity_average_fc.
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ scoping: Scoping, optional
+ Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.
+ denominator: FieldsContainer, optional
+ If a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc.
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -100,11 +94,11 @@ def __init__(
self.inputs.collapse_shell_layers.connect(collapse_shell_layers)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields into Elemental fields. Each
- elemental value is the fraction between the elemental
- difference and the entity average. The result is computed
- on a given element's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields into Elemental fields. Each elemental
+value is the fraction between the elemental difference and the entity
+average. The result is computed on a given element’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -112,41 +106,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+ document=r"""Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.""",
),
6: PinSpecification(
name="denominator",
type_names=["fields_container"],
optional=True,
- document="""If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- entity_average_fc.""",
+ document=r"""If a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc.""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
},
map_output_pin_spec={
@@ -154,14 +138,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -170,29 +154,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_fraction_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalFractionFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalFractionFc
+ inputs:
+ An instance of InputsElementalFractionFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalFractionFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalFractionFc
+ outputs:
+ An instance of OutputsElementalFractionFc.
"""
return super().outputs
@@ -235,12 +226,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._collapse_shell_layers)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -253,16 +245,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -275,17 +266,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -298,17 +287,15 @@ def scoping(self):
return self._scoping
@property
- def denominator(self):
- """Allows to connect denominator input to the operator.
+ def denominator(self) -> Input:
+ r"""Allows to connect denominator input to the operator.
- If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- entity_average_fc.
+ If a fields container is set in this pin, it is used as the denominator of the fraction instead of entity_average_fc.
- Parameters
- ----------
- my_denominator : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -321,16 +308,15 @@ def denominator(self):
return self._denominator
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -363,18 +349,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_fraction_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
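[Editor's note: a similar sketch for elemental_fraction_fc; my_fields_container and my_denominator_fc are hypothetical FieldsContainer inputs, the latter only needed when overriding the default entity_average_fc denominator described in the docstring above.]

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_fraction_fc()
>>> op.inputs.fields_container.connect(my_fields_container)  # ElementalNodal fields
>>> op.inputs.denominator.connect(my_denominator_fc)         # optional: denominator used instead of entity_average_fc
>>> fraction_fc = op.outputs.fields_container()              # fraction of elemental difference over entity average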
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_mean.py b/src/ansys/dpf/core/operators/averaging/elemental_mean.py
index c91fd5e9be9..d3409afcc00 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_mean.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_mean.py
@@ -4,34 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_mean(Operator):
- """Computes the average of a multi-entity field, (ElementalNodal ->
+ r"""Computes the average of a multi-entity field, (ElementalNodal ->
Elemental), (NodalElemental -> Nodal).
+
Parameters
----------
- field : Field
- collapse_shell_layers : bool, optional
- If true, shell layers are averaged as well
- (default is false).
- force_averaging : bool, optional
+ field: Field
+ collapse_shell_layers: bool, optional
+ If true, shell layers are averaged as well (default is false).
+ force_averaging: bool, optional
If true you average, if false you just sum.
- scoping : Scoping, optional
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ scoping: Scoping, optional
+        Average only on these elements. If it is a scoping container, the label must correspond to that of the fields containers.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -84,9 +85,10 @@ def __init__(
self.inputs.scoping.connect(scoping)
@staticmethod
- def _spec():
- description = """Computes the average of a multi-entity field, (ElementalNodal ->
- Elemental), (NodalElemental -> Nodal)."""
+ def _spec() -> Specification:
+ description = r"""Computes the average of a multi-entity field, (ElementalNodal ->
+Elemental), (NodalElemental -> Nodal).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -94,29 +96,25 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, shell layers are averaged as well
- (default is false).""",
+ document=r"""If true, shell layers are averaged as well (default is false).""",
),
2: PinSpecification(
name="force_averaging",
type_names=["bool"],
optional=True,
- document="""If true you average, if false you just sum.""",
+ document=r"""If true you average, if false you just sum.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+                document=r"""Average only on these elements. If it is a scoping container, the label must correspond to that of the fields containers.""",
),
},
map_output_pin_spec={
@@ -124,14 +122,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -140,29 +138,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="entity_average", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalMean:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalMean
+ inputs:
+ An instance of InputsElementalMean.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalMean:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalMean
+ outputs:
+ An instance of OutputsElementalMean.
"""
return super().outputs
@@ -199,12 +204,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scoping)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,15 +223,15 @@ def field(self):
return self._field
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, shell layers are averaged as well
- (default is false).
+ If true, shell layers are averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,14 +244,15 @@ def collapse_shell_layers(self):
return self._collapse_shell_layers
@property
- def force_averaging(self):
- """Allows to connect force_averaging input to the operator.
+ def force_averaging(self) -> Input:
+ r"""Allows to connect force_averaging input to the operator.
If true you average, if false you just sum.
- Parameters
- ----------
- my_force_averaging : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,17 +265,15 @@ def force_averaging(self):
return self._force_averaging
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+        Average only on these elements. If it is a scoping container, the label must correspond to that of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,18 +304,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_mean()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
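
A short sketch of the elemental_mean pins documented above, in the same doctest style as the regenerated examples; my_field below is a placeholder Field, not data from this patch:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_mean()
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.force_averaging.connect(True)   # average instead of just summing
>>> # op.outputs.field() triggers evaluation once a populated field is connected.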
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py
index d5cb5512823..1cc9a3d0e33 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_mean_fc.py
@@ -4,59 +4,48 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_mean_fc(Operator):
- """Computes the average of a multi-entity container of fields,
- (ElementalNodal -> Elemental), (NodalElemental -> Nodal). If the
- input fields are mixed shell/solid and collapseShellLayers is
- false, then the fields could be split by element shape and the
- output fields container would have an elshape label depending on
- the e_shell_layer and merge_solid_shell inputs (if e_shell_layer
- is not specified, the fields are split; if it is specified, the
- fields can be split based on merge_solid_shell).If
- collapseShellLayers is true, all available shell layers are
- collapsed and shells and solid fields are always merged.
+ r"""Computes the average of a multi-entity container of fields,
+ (ElementalNodal -> Elemental), (NodalElemental -> Nodal). If the input
+ fields are mixed shell/solid and collapseShellLayers is false, then the
+ fields could be split by element shape and the output fields container
+ would have an elshape label depending on the e_shell_layer and
+ merge_solid_shell inputs (if e_shell_layer is not specified, the fields
+ are split; if it is specified, the fields can be split based on
+    merge_solid_shell). If collapseShellLayers is true, all available shell
+ layers are collapsed and shells and solid fields are always merged.
+
Parameters
----------
- fields_container : FieldsContainer
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
- force_averaging : bool, optional
+ fields_container: FieldsContainer
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
+ force_averaging: bool, optional
If true you average, if false you just sum.
- scoping : Scoping or ScopingsContainer, optional
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- container.
- abstract_meshed_region : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). this pin only has
- an effect when collapse_shell_layers
- is false and a value for
- e_shell_layer is provided.
- e_shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. this pin only has an
- effect when collapse_shell_layers is
- false.
+ scoping: Scoping or ScopingsContainer, optional
+        Average only on these elements. If it is a scoping container, the label must correspond to that of the fields container.
+ abstract_meshed_region: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). This pin only has an effect when collapse_shell_layers is false and a value for e_shell_layer is provided.
+ e_shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. This pin only has an effect when collapse_shell_layers is false.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -127,19 +116,17 @@ def __init__(
self.inputs.e_shell_layer.connect(e_shell_layer)
@staticmethod
- def _spec():
- description = """Computes the average of a multi-entity container of fields,
- (ElementalNodal -> Elemental), (NodalElemental -> Nodal).
- If the input fields are mixed shell/solid and
- collapseShellLayers is false, then the fields could be
- split by element shape and the output fields container
- would have an elshape label depending on the e_shell_layer
- and merge_solid_shell inputs (if e_shell_layer is not
- specified, the fields are split; if it is specified, the
- fields can be split based on merge_solid_shell).If
- collapseShellLayers is true, all available shell layers
- are collapsed and shells and solid fields are always
- merged."""
+ def _spec() -> Specification:
+ description = r"""Computes the average of a multi-entity container of fields,
+(ElementalNodal -> Elemental), (NodalElemental -> Nodal). If the input
+fields are mixed shell/solid and collapseShellLayers is false, then the
+fields could be split by element shape and the output fields container
+would have an elshape label depending on the e_shell_layer and
+merge_solid_shell inputs (if e_shell_layer is not specified, the fields
+are split; if it is specified, the fields can be split based on
+merge_solid_shell). If collapseShellLayers is true, all available shell
+layers are collapsed and shells and solid fields are always merged.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -147,58 +134,43 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
2: PinSpecification(
name="force_averaging",
type_names=["bool"],
optional=True,
- document="""If true you average, if false you just sum.""",
+ document=r"""If true you average, if false you just sum.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- container.""",
+                document=r"""Average only on these elements. If it is a scoping container, the label must correspond to that of the fields container.""",
),
4: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). this pin only has
- an effect when collapse_shell_layers
- is false and a value for
- e_shell_layer is provided.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). This pin only has an effect when collapse_shell_layers is false and a value for e_shell_layer is provided.""",
),
27: PinSpecification(
name="e_shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. this pin only has an
- effect when collapse_shell_layers is
- false.""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. This pin only has an effect when collapse_shell_layers is false.""",
),
},
map_output_pin_spec={
@@ -206,14 +178,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -222,29 +194,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="entity_average_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalMeanFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalMeanFc
+ inputs:
+ An instance of InputsElementalMeanFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalMeanFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalMeanFc
+ outputs:
+ An instance of OutputsElementalMeanFc.
"""
return super().outputs
@@ -299,12 +278,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._e_shell_layer)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,16 +297,15 @@ def fields_container(self):
return self._fields_container
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -339,14 +318,15 @@ def collapse_shell_layers(self):
return self._collapse_shell_layers
@property
- def force_averaging(self):
- """Allows to connect force_averaging input to the operator.
+ def force_averaging(self) -> Input:
+ r"""Allows to connect force_averaging input to the operator.
If true you average, if false you just sum.
- Parameters
- ----------
- my_force_averaging : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -359,17 +339,15 @@ def force_averaging(self):
return self._force_averaging
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- container.
+        Average only on these elements. If it is a scoping container, the label must correspond to that of the fields container.
- Parameters
- ----------
- my_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -382,16 +360,15 @@ def scoping(self):
return self._scoping
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -404,19 +381,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). this pin only has
- an effect when collapse_shell_layers
- is false and a value for
- e_shell_layer is provided.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). This pin only has an effect when collapse_shell_layers is false and a value for e_shell_layer is provided.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -429,17 +402,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def e_shell_layer(self):
- """Allows to connect e_shell_layer input to the operator.
+ def e_shell_layer(self) -> Input:
+ r"""Allows to connect e_shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. this pin only has an
- effect when collapse_shell_layers is
- false.
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. This pin only has an effect when collapse_shell_layers is false.
- Parameters
- ----------
- my_e_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -470,18 +441,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_mean_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
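
A sketch of the shell-layer pins described above for elemental_mean_fc; the values are illustrative assumptions that follow the documented pin semantics:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_mean_fc()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())
>>> op.inputs.collapse_shell_layers.connect(False)
>>> op.inputs.merge_solid_shell.connect(True)  # only effective with an e_shell_layer value
>>> op.inputs.e_shell_layer.connect(0)         # 0: Top
>>> # op.outputs.fields_container() evaluates once real fields are connected.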
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py
index 9ee5487f710..fc3636fe5a3 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal.py
@@ -4,45 +4,41 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_nodal_to_nodal(Operator):
- """Transforms an Elemental Nodal field into a Nodal field using an
- averaging process. The result is computed on a given node's
- scoping.
+ r"""Transforms an Elemental Nodal field into a Nodal field using an
+ averaging process. The result is computed on a given node’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- Average only on these entities
- should_average : bool, optional
- Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).
- extend_to_mid_nodes : bool, optional
- Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.
- extend_weights_to_mid_nodes : bool, optional
- Extends weights to mid nodes (when
- available). default is false.
- mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ average only on these entities
+ should_average: bool, optional
+ Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).
+ extend_to_mid_nodes: bool, optional
+ Compute mid nodes (when available) by averaging the neighbour primary nodes.
+ extend_weights_to_mid_nodes: bool, optional
+ Extends weights to mid nodes (when available). Default is false.
+ mesh: MeshedRegion, optional
Returns
-------
- field : Field
- weight : PropertyField
- Provides the number of times it was found in
- the elemental nodal field, for each
- node. can be used to average later.
+ field: Field
+ weight: PropertyField
+ Provides the number of times it was found in the elemental nodal field, for each node. Can be used to average later.
Examples
--------
@@ -108,10 +104,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Transforms an Elemental Nodal field into a Nodal field using an
- averaging process. The result is computed on a given
- node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental Nodal field into a Nodal field using an
+averaging process. The result is computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -119,43 +115,37 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these entities""",
+ document=r"""average only on these entities""",
),
2: PinSpecification(
name="should_average",
type_names=["bool"],
optional=True,
- document="""Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).""",
+ document=r"""Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).""",
),
4: PinSpecification(
name="extend_to_mid_nodes",
type_names=["bool"],
optional=True,
- document="""Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.""",
+ document=r"""Compute mid nodes (when available) by averaging the neighbour primary nodes.""",
),
5: PinSpecification(
name="extend_weights_to_mid_nodes",
type_names=["bool"],
optional=True,
- document="""Extends weights to mid nodes (when
- available). default is false.""",
+ document=r"""Extends weights to mid nodes (when available). Default is false.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -163,22 +153,20 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="weight",
type_names=["property_field"],
optional=False,
- document="""Provides the number of times it was found in
- the elemental nodal field, for each
- node. can be used to average later.""",
+ document=r"""Provides the number of times it was found in the elemental nodal field, for each node. Can be used to average later.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -187,29 +175,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_nodal_To_nodal", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalNodalToNodal:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalNodalToNodal
+ inputs:
+ An instance of InputsElementalNodalToNodal.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalNodalToNodal:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalNodalToNodal
+ outputs:
+ An instance of OutputsElementalNodalToNodal.
"""
return super().outputs
@@ -260,15 +255,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -281,14 +276,15 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Average only on these entities
+ average only on these entities
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -301,16 +297,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def should_average(self):
- """Allows to connect should_average input to the operator.
+ def should_average(self) -> Input:
+ r"""Allows to connect should_average input to the operator.
- Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).
+ Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).
- Parameters
- ----------
- my_should_average : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,16 +318,15 @@ def should_average(self):
return self._should_average
@property
- def extend_to_mid_nodes(self):
- """Allows to connect extend_to_mid_nodes input to the operator.
+ def extend_to_mid_nodes(self) -> Input:
+ r"""Allows to connect extend_to_mid_nodes input to the operator.
- Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.
+ Compute mid nodes (when available) by averaging the neighbour primary nodes.
- Parameters
- ----------
- my_extend_to_mid_nodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -345,15 +339,15 @@ def extend_to_mid_nodes(self):
return self._extend_to_mid_nodes
@property
- def extend_weights_to_mid_nodes(self):
- """Allows to connect extend_weights_to_mid_nodes input to the operator.
+ def extend_weights_to_mid_nodes(self) -> Input:
+ r"""Allows to connect extend_weights_to_mid_nodes input to the operator.
- Extends weights to mid nodes (when
- available). default is false.
+ Extends weights to mid nodes (when available). Default is false.
- Parameters
- ----------
- my_extend_weights_to_mid_nodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -366,12 +360,13 @@ def extend_weights_to_mid_nodes(self):
return self._extend_weights_to_mid_nodes
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -405,35 +400,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._weight)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def weight(self):
- """Allows to get weight output of the operator
+ def weight(self) -> Output:
+ r"""Allows to get weight output of the operator
+
+ Provides the number of times it was found in the elemental nodal field, for each node. Can be used to average later.
Returns
- ----------
- my_weight : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_weight = op.outputs.weight()
- """ # noqa: E501
+ """
return self._weight
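
A sketch exercising both documented outputs of elemental_nodal_to_nodal; my_field is again a placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal()
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.extend_to_mid_nodes.connect(True)
>>> # With real ElementalNodal data, op.outputs.field() returns the averaged Nodal field
>>> # and op.outputs.weight() the per-node counts that can be used to average later.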
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py
index f6e89202cc2..2161f8be31e 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_nodal_to_nodal_elemental(Operator):
- """Transforms an Elemental Nodal field to Nodal Elemental. The result is
- computed on a given node's scoping.
+ r"""Transforms an Elemental Nodal field to Nodal Elemental. The result is
+ computed on a given node’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,9 +65,10 @@ def __init__(self, field=None, mesh_scoping=None, config=None, server=None):
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Transforms an Elemental Nodal field to Nodal Elemental. The result is
- computed on a given node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental Nodal field to Nodal Elemental. The result is
+computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,14 +76,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -86,14 +90,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,31 +106,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="ElementalNodal_To_NodalElemental", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalNodalToNodalElemental:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalNodalToNodalElemental
+ inputs:
+ An instance of InputsElementalNodalToNodalElemental.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalNodalToNodalElemental:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalNodalToNodalElemental
+ outputs:
+ An instance of OutputsElementalNodalToNodalElemental.
"""
return super().outputs
@@ -157,15 +168,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -178,12 +189,13 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -216,18 +228,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
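
The constructor keywords visible in the hunk above allow a one-line setup; a sketch with placeholder inputs:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()
>>> my_mesh_scoping = dpf.Scoping()
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental(
...     field=my_field, mesh_scoping=my_mesh_scoping
... )
>>> # op.outputs.field() evaluates the operator once populated inputs are supplied.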
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py
index 6a7a1d33e57..54db22e0445 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_elemental_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_nodal_to_nodal_elemental_fc(Operator):
- """Transforms Elemental Nodal fields to Nodal Elemental fields. The
- result is computed on a given node's scoping.
+ r"""Transforms Elemental Nodal fields to Nodal Elemental fields. The result
+ is computed on a given node’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh_scoping : Scoping, optional
+ fields_container: FieldsContainer
+ mesh_scoping: Scoping, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -61,9 +66,10 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields to Nodal Elemental fields. The
- result is computed on a given node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields to Nodal Elemental fields. The result
+is computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,13 +77,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -85,14 +91,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,31 +107,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="ElementalNodal_To_NodalElemental_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalNodalToNodalElementalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalNodalToNodalElementalFc
+ inputs:
+ An instance of InputsElementalNodalToNodalElementalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalNodalToNodalElementalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalNodalToNodalElementalFc
+ outputs:
+ An instance of OutputsElementalNodalToNodalElementalFc.
"""
return super().outputs
@@ -156,12 +169,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -174,12 +188,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
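
For the fields-container variant, the same pattern applies to each field in the container; placeholders again stand in for real data:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_elemental_fc()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())
>>> op.inputs.mesh_scoping.connect(dpf.Scoping())   # optional node scoping
>>> # op.outputs.fields_container() returns the Nodal Elemental fields on evaluation.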
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py
index e8c4331e17f..babc7e43a4a 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_nodal_to_nodal_fc.py
@@ -4,64 +4,49 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_nodal_to_nodal_fc(Operator):
- """Transforms Elemental Nodal fields into Nodal fields using an averaging
- process. The result is computed on a given node's scoping. If the
- input fields are mixed shell/solid, then the fields are split by
- element shape and the output fields container has an elshape label
- depending on the merge_solid_shell input.
+ r"""Transforms Elemental Nodal fields into Nodal fields using an averaging
+ process. The result is computed on a given node’s scoping. If the input
+ fields are mixed shell/solid, then the fields are split by element shape
+ and the output fields container has an elshape label depending on the
+ merge_solid_shell input.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- should_average : bool, optional
- Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).
- scoping : Scoping or ScopingsContainer, optional
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
- extend_to_mid_nodes : bool, optional
- Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.
- extend_weights_to_mid_nodes : bool, optional
- Extends weights to mid nodes (when
- available). default is false.
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
- shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ should_average: bool, optional
+ Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).
+ scoping: Scoping or ScopingsContainer, optional
+        Average only on these nodes. If it is a scoping container, the label must correspond to that of the fields containers.
+ extend_to_mid_nodes: bool, optional
+ Compute mid nodes (when available) by averaging the neighbour primary nodes.
+ extend_weights_to_mid_nodes: bool, optional
+ Extends weights to mid nodes (when available). Default is false.
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
+ shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
Returns
-------
- fields_container : FieldsContainer
- weights : Class Dataprocessing::Dpftypecollection<Class
+ fields_container: FieldsContainer
+ weights: Class Dataprocessing::Dpftypecollection<Class
Dataprocessing::Cpropertyfield>
- Gives for each node, the number of times it
- was found in the elemental nodal
- field. can be used to average later.
+ Gives for each node, the number of times it was found in the Elemental Nodal field. Can be used to average later.
Examples
--------
@@ -141,13 +126,13 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields into Nodal fields using an averaging
- process. The result is computed on a given node's scoping.
- If the input fields are mixed shell/solid, then the fields
- are split by element shape and the output fields container
- has an elshape label depending on the merge_solid_shell
- input."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields into Nodal fields using an averaging
+process. The result is computed on a given node’s scoping. If the input
+fields are mixed shell/solid, then the fields are split by element shape
+and the output fields container has an elshape label depending on the
+merge_solid_shell input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -155,67 +140,49 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
2: PinSpecification(
name="should_average",
type_names=["bool"],
optional=True,
- document="""Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).""",
+ document=r"""Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+                document=r"""Average only on these nodes. If it is a scoping container, the label must correspond to that of the fields containers.""",
),
4: PinSpecification(
name="extend_to_mid_nodes",
type_names=["bool"],
optional=True,
- document="""Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.""",
+ document=r"""Compute mid nodes (when available) by averaging the neighbour primary nodes.""",
),
5: PinSpecification(
name="extend_weights_to_mid_nodes",
type_names=["bool"],
optional=True,
- document="""Extends weights to mid nodes (when
- available). default is false.""",
+ document=r"""Extends weights to mid nodes (when available). Default is false.""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).""",
),
},
map_output_pin_spec={
@@ -223,7 +190,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="weights",
@@ -231,16 +198,14 @@ def _spec():
"class dataProcessing::DpfTypeCollection"
],
optional=False,
- document="""Gives for each node, the number of times it
- was found in the elemental nodal
- field. can be used to average later.""",
+ document=r"""Gives for each node, the number of times it was found in the Elemental Nodal field. Can be used to average later.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -249,31 +214,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="elemental_nodal_To_nodal_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalNodalToNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalNodalToNodalFc
+ inputs:
+ An instance of InputsElementalNodalToNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalNodalToNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalNodalToNodalFc
+ outputs:
+ An instance of OutputsElementalNodalToNodalFc.
"""
return super().outputs
@@ -338,12 +310,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,16 +329,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -378,16 +350,15 @@ def mesh(self):
return self._mesh
@property
- def should_average(self):
- """Allows to connect should_average input to the operator.
+ def should_average(self) -> Input:
+ r"""Allows to connect should_average input to the operator.
- Each nodal value is divided by the number of
- elements linked to this node (default
- is true for discrete quantities).
+ Each nodal value is divided by the number of elements linked to this node (default is true for discrete quantities).
- Parameters
- ----------
- my_should_average : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -400,17 +371,15 @@ def should_average(self):
return self._should_average
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+        Average only on these nodes. If it is a scoping container, the label must correspond to that of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,16 +392,15 @@ def scoping(self):
return self._scoping
@property
- def extend_to_mid_nodes(self):
- """Allows to connect extend_to_mid_nodes input to the operator.
+ def extend_to_mid_nodes(self) -> Input:
+ r"""Allows to connect extend_to_mid_nodes input to the operator.
- Compute mid nodes (when available) by
- averaging the neighbour primary
- nodes.
+ Compute mid nodes (when available) by averaging the neighbour primary nodes.
- Parameters
- ----------
- my_extend_to_mid_nodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +413,15 @@ def extend_to_mid_nodes(self):
return self._extend_to_mid_nodes
@property
- def extend_weights_to_mid_nodes(self):
- """Allows to connect extend_weights_to_mid_nodes input to the operator.
+ def extend_weights_to_mid_nodes(self) -> Input:
+ r"""Allows to connect extend_weights_to_mid_nodes input to the operator.
- Extends weights to mid nodes (when
- available). default is false.
+ Extends weights to mid nodes (when available). Default is false.
- Parameters
- ----------
- my_extend_weights_to_mid_nodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,18 +434,15 @@ def extend_weights_to_mid_nodes(self):
return self._extend_weights_to_mid_nodes
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,18 +455,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
- Parameters
- ----------
- my_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -537,36 +499,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._weights)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def weights(self):
- """Allows to get weights output of the operator
+ def weights(self) -> Output:
+ r"""Allows to get weights output of the operator
+
+ Gives, for each node, the number of times it was found in the Elemental Nodal field. Can be used to average later.
Returns
- ----------
- my_weights : Class Dataprocessing::Dpftypecollection<Class
- Dataprocessing::Cpropertyfield>
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_weights = op.outputs.weights()
- """ # noqa: E501
+ """
return self._weights
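A minimal usage sketch for elemental_nodal_to_nodal_fc as documented above; `my_fields_container` is a placeholder for Elemental Nodal data and is not part of the library:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_nodal_to_nodal_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> # Evaluating an output pin runs the operator.
>>> averaged_fc = op.outputs.fields_container()
>>> # weights counts, per node, how many Elemental Nodal values contributed;
>>> # it can be reused for a later weighted average.
>>> weights = op.outputs.weights()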
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py
index da4c7d0d52d..1daf92f24be 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_to_elemental_nodal(Operator):
- """Transforms an Elemental field to an Elemental Nodal field.
+ r"""Transforms an Elemental field to an Elemental Nodal field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- Average only on these entities
- mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ average only on these entities
+ mesh: MeshedRegion, optional
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -69,8 +73,9 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Transforms an Elemental field to an Elemental Nodal field."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental field to an Elemental Nodal field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,20 +83,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these entities""",
+ document=r"""average only on these entities""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -99,14 +103,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -115,31 +119,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="elemental_to_elemental_nodal", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalToElementalNodal:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalToElementalNodal
+ inputs:
+ An instance of InputsElementalToElementalNodal.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalToElementalNodal:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalToElementalNodal
+ outputs:
+ An instance of OutputsElementalToElementalNodal.
"""
return super().outputs
@@ -174,15 +185,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,14 +206,15 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Average only on these entities
+ average only on these entities
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,12 +227,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +264,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_elemental_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
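A short sketch of the single-field form, assuming a placeholder Elemental Field `my_field`; constructor keywords connect the pins directly, as in the generated __init__:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_elemental_nodal(field=my_field)
>>> elemental_nodal_field = op.outputs.field()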
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py
index ced8d1dac57..f4a25b42017 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_to_elemental_nodal_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_to_elemental_nodal_fc(Operator):
- """Transforms Elemental field to Elemental Nodal field.
+ r"""Transforms Elemental field to Elemental Nodal field.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- mesh_scoping : Scoping, optional
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ mesh_scoping: Scoping, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,8 +76,9 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Transforms Elemental field to Elemental Nodal field."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental field to Elemental Nodal field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,31 +122,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="elemental_to_elemental_nodal_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalToElementalNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalToElementalNodalFc
+ inputs:
+ An instance of InputsElementalToElementalNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalToElementalNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalToElementalNodalFc
+ outputs:
+ An instance of OutputsElementalToElementalNodalFc.
"""
return super().outputs
@@ -177,12 +190,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,12 +209,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -213,12 +228,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
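The fields-container variant follows the same pattern; `my_elemental_fc` and `my_scoping` are placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_elemental_nodal_fc()
>>> op.inputs.fields_container.connect(my_elemental_fc)
>>> # Optional pin: restrict the transformation to a scoping.
>>> op.inputs.mesh_scoping.connect(my_scoping)
>>> out_fc = op.outputs.fields_container()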
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py
index f4176ad1ab4..44b1e53ce2d 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal.py
@@ -4,45 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_to_nodal(Operator):
- """Transforms an Elemental field to a Nodal field. The result is computed
- on a given node's scoping. 1. For a finite element mesh, the
- value on a node is the average of the values of the neighbour
- elements. 2. For a volume finite volume mesh, the agorithm is :
- - For each node, compute interpolation weights for the cells
- connected to it based on the Frink's Laplacian method. -
- If the determinant of the I matrix is zero, switch to an inverse
- distance weighted average. - If not, compute the Frink
- weights and apply the Holmes' weight clip. - If the
- clipping produces a large overshoot, inverse volume weighted
- average is used.. 3. For a face finite volume mesh inverse
- distance weighted average is used.
+ r"""Transforms an Elemental field to a Nodal field. The result is computed
+ on a given node’s scoping.
+
+ 1. For a finite element mesh, the value on a node is the average of the
+ values of the neighbour elements.
+
+ 2. For a volume finite volume mesh, the algorithm is:
+
+ - For each node, compute interpolation weights for the cells
+ connected to it based on the Frink’s Laplacian method.
+ - If the determinant of the I matrix is zero, switch to an inverse
+ distance weighted average.
+ - If not, compute the Frink weights and apply the Holmes’ weight
+ clip.
+ - If the clipping produces a large overshoot, inverse volume weighted
+ average is used.
+
+ 3. For a face finite volume mesh inverse distance weighted average is
+ used.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- force_averaging : int, optional
- Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).
- algorithm : int, optional
- Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ force_averaging: int, optional
+ Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).
+ algorithm: int, optional
+ Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -95,20 +102,27 @@ def __init__(
self.inputs.algorithm.connect(algorithm)
@staticmethod
- def _spec():
- description = """Transforms an Elemental field to a Nodal field. The result is computed
- on a given node's scoping. 1. For a finite element mesh,
- the value on a node is the average of the values of the
- neighbour elements. 2. For a volume finite volume mesh,
- the agorithm is : - For each node, compute
- interpolation weights for the cells connected to it based
- on the Frink's Laplacian method. - If the
- determinant of the I matrix is zero, switch to an inverse
- distance weighted average. - If not, compute the
- Frink weights and apply the Holmes' weight clip. -
- If the clipping produces a large overshoot, inverse volume
- weighted average is used.. 3. For a face finite volume
- mesh inverse distance weighted average is used."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental field to a Nodal field. The result is computed
+on a given node’s scoping.
+
+1. For a finite element mesh, the value on a node is the average of the
+ values of the neighbour elements.
+
+2. For a volume finite volume mesh, the algorithm is:
+
+ - For each node, compute interpolation weights for the cells
+ connected to it based on the Frink’s Laplacian method.
+ - If the determinant of the I matrix is zero, switch to an inverse
+ distance weighted average.
+ - If not, compute the Frink weights and apply the Holmes’ weight
+ clip.
+ - If the clipping produces a large overshoot, inverse volume weighted
+ average is used.
+
+3. For a face finite volume mesh inverse distance weighted average is
+ used.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -116,30 +130,25 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="force_averaging",
type_names=["int32"],
optional=True,
- document="""Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).""",
+ document=r"""Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).""",
),
200: PinSpecification(
name="algorithm",
type_names=["int32"],
optional=True,
- document="""Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).""",
+ document=r"""Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).""",
),
},
map_output_pin_spec={
@@ -147,14 +156,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -163,29 +172,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_to_nodal", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalToNodal:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalToNodal
+ inputs:
+ An instance of InputsElementalToNodal.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalToNodal:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalToNodal
+ outputs:
+ An instance of OutputsElementalToNodal.
"""
return super().outputs
@@ -222,15 +238,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._algorithm)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -243,12 +259,13 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,16 +278,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def force_averaging(self):
- """Allows to connect force_averaging input to the operator.
+ def force_averaging(self) -> Input:
+ r"""Allows to connect force_averaging input to the operator.
- Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).
+ Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).
- Parameters
- ----------
- my_force_averaging : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,16 +299,15 @@ def force_averaging(self):
return self._force_averaging
@property
- def algorithm(self):
- """Allows to connect algorithm input to the operator.
+ def algorithm(self) -> Input:
+ r"""Allows to connect algorithm input to the operator.
- Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).
+ Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).
- Parameters
- ----------
- my_algorithm : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,18 +338,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
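A sketch exercising the optional pins described above, with a placeholder Elemental Field `my_field`; the pin values are illustrative only:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_nodal()
>>> op.inputs.field.connect(my_field)
>>> # Force nodal averaging and pin the algorithm choice instead of
>>> # letting it be picked from the mesh type.
>>> op.inputs.force_averaging.connect(1)
>>> op.inputs.algorithm.connect(1)
>>> nodal_field = op.outputs.field()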
diff --git a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py
index d85949bce60..b4b3514dd33 100644
--- a/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/elemental_to_nodal_fc.py
@@ -4,44 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_to_nodal_fc(Operator):
- """Transforms Elemental Nodal fields to Nodal fields. The result is
- computed on a given node's scoping.1. For a finite element mesh,
- the value on a node is the average of the values of the neighbour
- elements. 2. For a finite volume mesh, the agorithm is : - For
- each node, compute interpolation weights for the cells connected
- to it based on the Frink's Laplacian method. - If the
- determinant of the I matrix is zero, switch to an inverse distance
- weighted average. - If not, compute the Frink weights and
- apply the Holmes' weight clip. - If the clipping produces
- a large overshoot, inverse volume weighted average is used.. 3.
- For a face finite volume mesh inverse distance weighted average is
- used.
+ r"""Transforms Elemental Nodal fields to Nodal fields. The result is
+ computed on a given node’s scoping. 1. For a finite element mesh, the
+ value on a node is the average of the values of the neighbour elements.
+
+ 2. For a finite volume mesh, the algorithm is:
+
+ - For each node, compute interpolation weights for the cells
+ connected to it based on the Frink’s Laplacian method.
+ - If the determinant of the I matrix is zero, switch to an inverse
+ distance weighted average.
+ - If not, compute the Frink weights and apply the Holmes’ weight
+ clip.
+ - If the clipping produces a large overshoot, inverse volume weighted
+ average is used.
+
+ 3. For a face finite volume mesh inverse distance weighted average is
+ used.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- force_averaging : int, optional
- Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).
- mesh_scoping : Scoping or ScopingsContainer, optional
- algorithm : int, optional
- Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ force_averaging: int, optional
+ Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).
+ mesh_scoping: Scoping or ScopingsContainer, optional
+ algorithm: int, optional
+ Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -100,20 +106,25 @@ def __init__(
self.inputs.algorithm.connect(algorithm)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields to Nodal fields. The result is
- computed on a given node's scoping.1. For a finite element
- mesh, the value on a node is the average of the values of
- the neighbour elements. 2. For a finite volume mesh, the
- agorithm is : - For each node, compute interpolation
- weights for the cells connected to it based on the
- Frink's Laplacian method. - If the determinant of
- the I matrix is zero, switch to an inverse distance
- weighted average. - If not, compute the Frink
- weights and apply the Holmes' weight clip. - If
- the clipping produces a large overshoot, inverse volume
- weighted average is used.. 3. For a face finite volume
- mesh inverse distance weighted average is used."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields to Nodal fields. The result is
+computed on a given node’s scoping. 1. For a finite element mesh, the
+value on a node is the average of the values of the neighbour elements.
+
+2. For a finite volume mesh, the algorithm is:
+
+ - For each node, compute interpolation weights for the cells
+ connected to it based on the Frink’s Laplacian method.
+ - If the determinant of the I matrix is zero, switch to an inverse
+ distance weighted average.
+ - If not, compute the Frink weights and apply the Holmes’ weight
+ clip.
+ - If the clipping produces a large overshoot, inverse volume weighted
+ average is used.
+
+3. For a face finite volume mesh inverse distance weighted average is
+ used.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -121,35 +132,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="force_averaging",
type_names=["int32"],
optional=True,
- document="""Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).""",
+ document=r"""Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).""",
),
3: PinSpecification(
name="mesh_scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
200: PinSpecification(
name="algorithm",
type_names=["int32"],
optional=True,
- document="""Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).""",
+ document=r"""Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).""",
),
},
map_output_pin_spec={
@@ -157,14 +164,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -173,29 +180,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elemental_to_nodal_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalToNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalToNodalFc
+ inputs:
+ An instance of InputsElementalToNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalToNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalToNodalFc
+ outputs:
+ An instance of OutputsElementalToNodalFc.
"""
return super().outputs
@@ -242,12 +256,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._algorithm)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,12 +275,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -278,16 +294,15 @@ def mesh(self):
return self._mesh
@property
- def force_averaging(self):
- """Allows to connect force_averaging input to the operator.
+ def force_averaging(self) -> Input:
+ r"""Allows to connect force_averaging input to the operator.
- Averaging on nodes is used if this pin is set
- to 1 (default is 1 for integrated
- results and 0 for discrete ones).
+ Averaging on nodes is used if this pin is set to 1 (default is 1 for integrated results and 0 for discrete ones).
- Parameters
- ----------
- my_force_averaging : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,12 +315,13 @@ def force_averaging(self):
return self._force_averaging
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -318,16 +334,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def algorithm(self):
- """Allows to connect algorithm input to the operator.
+ def algorithm(self) -> Input:
+ r"""Allows to connect algorithm input to the operator.
- Forces the usage of algorithm 1, 2 or 3
- (default is chosen based on the type
- of mesh).
+ Forces the usage of algorithm 1, 2 or 3 (default is chosen based on the type of mesh).
- Parameters
- ----------
- my_algorithm : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -360,18 +375,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.elemental_to_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
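A chaining sketch for the fields-container variant, assuming a placeholder upstream operator `upstream_op` that exposes a fields_container output; an Output can be connected straight into an Input pin:

>>> from ansys.dpf import core as dpf
>>> avg = dpf.operators.averaging.elemental_to_nodal_fc()
>>> avg.inputs.fields_container.connect(upstream_op.outputs.fields_container)
>>> nodal_fc = avg.outputs.fields_container()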
diff --git a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py
index 174d063cfdd..f2514482e28 100644
--- a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py
+++ b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class extend_to_mid_nodes(Operator):
- """Extends an Elemental Nodal or Nodal field defined on corner nodes to a
+ r"""Extends an Elemental Nodal or Nodal field defined on corner nodes to a
field defined also on the mid nodes.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh: MeshedRegion, optional
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -59,9 +63,10 @@ def __init__(self, field=None, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extends an Elemental Nodal or Nodal field defined on corner nodes to a
- field defined also on the mid nodes."""
+ def _spec() -> Specification:
+ description = r"""Extends an Elemental Nodal or Nodal field defined on corner nodes to a
+field defined also on the mid nodes.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,14 +74,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -84,14 +88,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -100,29 +104,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="extend_to_mid_nodes", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExtendToMidNodes:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExtendToMidNodes
+ inputs:
+ An instance of InputsExtendToMidNodes.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExtendToMidNodes:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExtendToMidNodes
+ outputs:
+ An instance of OutputsExtendToMidNodes.
"""
return super().outputs
@@ -149,15 +160,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,12 +181,13 @@ def field(self):
return self._field
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,18 +218,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.extend_to_mid_nodes()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
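A minimal sketch, with `my_corner_field` as a placeholder for a field defined on corner nodes only:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.extend_to_mid_nodes(field=my_corner_field)
>>> extended_field = op.outputs.field()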
diff --git a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py
index bc0786e0247..87713463659 100644
--- a/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/extend_to_mid_nodes_fc.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class extend_to_mid_nodes_fc(Operator):
- """Extends Elemental Nodal or Nodal fields defined on corner nodes to
+ r"""Extends Elemental Nodal or Nodal fields defined on corner nodes to
Elemental Nodal fields defined also on the mid nodes.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -60,9 +63,10 @@ def __init__(self, fields_container=None, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extends Elemental Nodal or Nodal fields defined on corner nodes to
- Elemental Nodal fields defined also on the mid nodes."""
+ def _spec() -> Specification:
+ description = r"""Extends Elemental Nodal or Nodal fields defined on corner nodes to
+Elemental Nodal fields defined also on the mid nodes.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,15 +74,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
},
map_output_pin_spec={
@@ -86,14 +88,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +104,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="extend_to_mid_nodes_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExtendToMidNodesFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExtendToMidNodesFc
+ inputs:
+ An instance of InputsExtendToMidNodesFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExtendToMidNodesFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExtendToMidNodesFc
+ outputs:
+ An instance of OutputsExtendToMidNodesFc.
"""
return super().outputs
@@ -153,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,16 +181,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -213,18 +222,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.extend_to_mid_nodes_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
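The fields-container form with the optional mesh pin; `my_fc` and `my_meshed_region` are placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.extend_to_mid_nodes_fc()
>>> op.inputs.fields_container.connect(my_fc)
>>> # Only needed when the fields carry no mesh support of their own.
>>> op.inputs.mesh.connect(my_meshed_region)
>>> extended_fc = op.outputs.fields_container()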
diff --git a/src/ansys/dpf/core/operators/averaging/force_summation.py b/src/ansys/dpf/core/operators/averaging/force_summation.py
index 963d47c97d9..bfa540ec894 100644
--- a/src/ansys/dpf/core/operators/averaging/force_summation.py
+++ b/src/ansys/dpf/core/operators/averaging/force_summation.py
@@ -4,49 +4,46 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class force_summation(Operator):
- """Computes the sum of elemental forces contribution on a set of nodes in
- Global Coordinate System. Equivalent to MAPDL FSUM & NFORCE
- commands. Supports Static, Transient, Modal & Harmonic analysis
- for thermal and structural degrees of freedom.
+ r"""Computes the sum of elemental forces contribution on a set of nodes in
+ Global Coordinate System. Equivalent to MAPDL FSUM & NFORCE commands.
+ Supports Static, Transient, Modal & Harmonic analysis for thermal and
+ structural degrees of freedom.
+
Parameters
----------
- time_scoping : Scoping, optional
- Default = all time steps
- nodal_scoping : Scoping, optional
- Nodal scoping. set of nodes in which
- elemental contribution forces will be
- accumulated (default = all nodes)
- elemental_scoping : Scoping, optional
- Elemental scoping. set of elements
- contributing to the force calcuation.
- (default = all elements)
- data_sources : DataSources
- force_type : int, optional
- Type of force to be processed (0 - default:
- total forces (static, damping, and
- inertia)., 1: static forces, 2:
- damping forces, 3: inertia forces)
- spoint : Field, optional
- Coordinate field of a point for moment
- summations. defaults to (0,0,0).
+ time_scoping: Scoping, optional
+ default = all time steps
+ nodal_scoping: Scoping, optional
+ Nodal Scoping. Set of nodes in which elemental contribution forces will be accumulated (default = all nodes)
+ elemental_scoping: Scoping, optional
+ Elemental Scoping. Set of elements contributing to the force calculation. (default = all elements)
+ data_sources: DataSources
+ force_type: int, optional
+ Type of force to be processed (0 - default: Total forces (static, damping, and inertia), 1: Static forces, 2: Damping forces, 3: Inertia forces)
+ spoint: Field, optional
+ Coordinate field of a point for moment summations. Defaults to (0,0,0).
Returns
-------
- force_accumulation : FieldsContainer
- moment_accumulation : FieldsContainer
- heat_accumulation : FieldsContainer
- forces_on_nodes : FieldsContainer
- moments_on_nodes : FieldsContainer
- heats_on_nodes : FieldsContainer
+ force_accumulation: FieldsContainer
+ moment_accumulation: FieldsContainer
+ heat_accumulation: FieldsContainer
+ forces_on_nodes: FieldsContainer
+ moments_on_nodes: FieldsContainer
+ heats_on_nodes: FieldsContainer
Examples
--------
@@ -116,12 +113,12 @@ def __init__(
self.inputs.spoint.connect(spoint)
@staticmethod
- def _spec():
- description = """Computes the sum of elemental forces contribution on a set of nodes in
- Global Coordinate System. Equivalent to MAPDL FSUM &
- NFORCE commands. Supports Static, Transient, Modal &
- Harmonic analysis for thermal and structural degrees of
- freedom."""
+ def _spec() -> Specification:
+ description = r"""Computes the sum of elemental forces contribution on a set of nodes in
+Global Coordinate System. Equivalent to MAPDL FSUM & NFORCE commands.
+Supports Static, Transient, Modal & Harmonic analysis for thermal and
+structural degrees of freedom.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -129,45 +126,37 @@ def _spec():
name="time_scoping",
type_names=["scoping"],
optional=True,
- document="""Default = all time steps""",
+ document=r"""default = all time steps""",
),
1: PinSpecification(
name="nodal_scoping",
type_names=["scoping"],
optional=True,
- document="""Nodal scoping. set of nodes in which
- elemental contribution forces will be
- accumulated (default = all nodes)""",
+ document=r"""Nodal Scoping. Set of nodes in which elemental contribution forces will be accumulated (default = all nodes)""",
),
2: PinSpecification(
name="elemental_scoping",
type_names=["scoping"],
optional=True,
- document="""Elemental scoping. set of elements
- contributing to the force calcuation.
- (default = all elements)""",
+ document=r"""Elemental Scoping. Set of elements contributing to the force calcuation. (default = all elements)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="force_type",
type_names=["int32"],
optional=True,
- document="""Type of force to be processed (0 - default:
- total forces (static, damping, and
- inertia)., 1: static forces, 2:
- damping forces, 3: inertia forces)""",
+ document=r"""Type of force to be processed (0 - default: Total forces (static, damping, and inertia)., 1: Static forces, 2: Damping forces, 3: Inertia forces)""",
),
6: PinSpecification(
name="spoint",
type_names=["field"],
optional=True,
- document="""Coordinate field of a point for moment
- summations. defaults to (0,0,0).""",
+ document=r"""Coordinate field of a point for moment summations. Defaults to (0,0,0).""",
),
},
map_output_pin_spec={
@@ -175,44 +164,44 @@ def _spec():
name="force_accumulation",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="moment_accumulation",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="heat_accumulation",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="forces_on_nodes",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
11: PinSpecification(
name="moments_on_nodes",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
12: PinSpecification(
name="heats_on_nodes",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -221,29 +210,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="force_summation", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsForceSummation:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsForceSummation
+ inputs:
+ An instance of InputsForceSummation.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsForceSummation:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsForceSummation
+ outputs:
+ An instance of OutputsForceSummation.
"""
return super().outputs
@@ -286,14 +282,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._spoint)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Default = all time steps
+ default = all time steps
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -306,16 +303,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def nodal_scoping(self):
- """Allows to connect nodal_scoping input to the operator.
+ def nodal_scoping(self) -> Input:
+ r"""Allows to connect nodal_scoping input to the operator.
- Nodal scoping. set of nodes in which
- elemental contribution forces will be
- accumulated (default = all nodes)
+ Nodal Scoping. Set of nodes in which elemental contribution forces will be accumulated (default = all nodes)
- Parameters
- ----------
- my_nodal_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -328,16 +324,15 @@ def nodal_scoping(self):
return self._nodal_scoping
@property
- def elemental_scoping(self):
- """Allows to connect elemental_scoping input to the operator.
+ def elemental_scoping(self) -> Input:
+ r"""Allows to connect elemental_scoping input to the operator.
- Elemental scoping. set of elements
- contributing to the force calcuation.
- (default = all elements)
+ Elemental Scoping. Set of elements contributing to the force calculation. (default = all elements)
- Parameters
- ----------
- my_elemental_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,12 +345,13 @@ def elemental_scoping(self):
return self._elemental_scoping
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -368,17 +364,15 @@ def data_sources(self):
return self._data_sources
@property
- def force_type(self):
- """Allows to connect force_type input to the operator.
+ def force_type(self) -> Input:
+ r"""Allows to connect force_type input to the operator.
- Type of force to be processed (0 - default:
- total forces (static, damping, and
- inertia)., 1: static forces, 2:
- damping forces, 3: inertia forces)
+ Type of force to be processed (0 - default: Total forces (static, damping, and inertia), 1: Static forces, 2: Damping forces, 3: Inertia forces)
- Parameters
- ----------
- my_force_type : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -391,15 +385,15 @@ def force_type(self):
return self._force_type
@property
- def spoint(self):
- """Allows to connect spoint input to the operator.
+ def spoint(self) -> Input:
+ r"""Allows to connect spoint input to the operator.
- Coordinate field of a point for moment
- summations. defaults to (0,0,0).
+ Coordinate field of a point for moment summations. Defaults to (0,0,0).
- Parameters
- ----------
- my_spoint : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,103 +439,109 @@ def __init__(self, op: Operator):
self._outputs.append(self._heats_on_nodes)
@property
- def force_accumulation(self):
- """Allows to get force_accumulation output of the operator
+ def force_accumulation(self) -> Output:
+ r"""Allows to get force_accumulation output of the operator
Returns
- ----------
- my_force_accumulation : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_force_accumulation = op.outputs.force_accumulation()
- """ # noqa: E501
+ """
return self._force_accumulation
@property
- def moment_accumulation(self):
- """Allows to get moment_accumulation output of the operator
+ def moment_accumulation(self) -> Output:
+ r"""Allows to get moment_accumulation output of the operator
Returns
- ----------
- my_moment_accumulation : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_moment_accumulation = op.outputs.moment_accumulation()
- """ # noqa: E501
+ """
return self._moment_accumulation
@property
- def heat_accumulation(self):
- """Allows to get heat_accumulation output of the operator
+ def heat_accumulation(self) -> Output:
+ r"""Allows to get heat_accumulation output of the operator
Returns
- ----------
- my_heat_accumulation : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_heat_accumulation = op.outputs.heat_accumulation()
- """ # noqa: E501
+ """
return self._heat_accumulation
@property
- def forces_on_nodes(self):
- """Allows to get forces_on_nodes output of the operator
+ def forces_on_nodes(self) -> Output:
+ r"""Allows to get forces_on_nodes output of the operator
Returns
- ----------
- my_forces_on_nodes : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_forces_on_nodes = op.outputs.forces_on_nodes()
- """ # noqa: E501
+ """
return self._forces_on_nodes
@property
- def moments_on_nodes(self):
- """Allows to get moments_on_nodes output of the operator
+ def moments_on_nodes(self) -> Output:
+ r"""Allows to get moments_on_nodes output of the operator
Returns
- ----------
- my_moments_on_nodes : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_moments_on_nodes = op.outputs.moments_on_nodes()
- """ # noqa: E501
+ """
return self._moments_on_nodes
@property
- def heats_on_nodes(self):
- """Allows to get heats_on_nodes output of the operator
+ def heats_on_nodes(self) -> Output:
+ r"""Allows to get heats_on_nodes output of the operator
Returns
- ----------
- my_heats_on_nodes : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.force_summation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_heats_on_nodes = op.outputs.heats_on_nodes()
- """ # noqa: E501
+ """
return self._heats_on_nodes
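A minimal usage sketch of the force_summation operator documented above, in the same doctest style as its docstring examples; the result-file path is a hypothetical placeholder and only pin names confirmed by this patch are used:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"D:/results/file.rst")  # hypothetical path
>>> op = dpf.operators.averaging.force_summation()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.force_type.connect(1)  # 1: static forces only
>>> forces = op.outputs.forces_on_nodes()
>>> total_moment = op.outputs.moment_accumulation()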
diff --git a/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py b/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py
index 438456813e1..f9ee7ff551e 100644
--- a/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/gauss_to_node_fc.py
@@ -4,36 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gauss_to_node_fc(Operator):
- """Extrapolates results available at Gauss or quadrature points to nodal
+ r"""Extrapolates results available at Gauss or quadrature points to nodal
points for a field container. The available elements are: Linear
- quadrangle, parabolic quadrangle, linear hexagonal, quadratic
- hexagonal, linear tetrahedral, and quadratic tetrahedral.
+ quadrangle, parabolic quadrangle, linear hexagonal, quadratic hexagonal,
+ linear tetrahedral, and quadratic tetrahedral.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used for
- extrapolating results available at
- gauss or quadrature points to nodal
- points.
- scoping : Scoping, optional
- Extrapolating results on the selected
- scoping. if it is a scoping
- container, the label must correspond
- to the one of the fields containers.
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used for extrapolating results available at Gauss or quadrature points to nodal points.
+ scoping: Scoping, optional
+ Extrapolating results on the selected scoping. If it is a scoping container, the label must correspond to the one of the fields containers.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -75,12 +74,12 @@ def __init__(
self.inputs.scoping.connect(scoping)
@staticmethod
- def _spec():
- description = """Extrapolates results available at Gauss or quadrature points to nodal
- points for a field container. The available elements are:
- Linear quadrangle, parabolic quadrangle, linear hexagonal,
- quadratic hexagonal, linear tetrahedral, and quadratic
- tetrahedral."""
+ def _spec() -> Specification:
+ description = r"""Extrapolates results available at Gauss or quadrature points to nodal
+points for a field container. The available elements are: Linear
+quadrangle, parabolic quadrangle, linear hexagonal, quadratic hexagonal,
+linear tetrahedral, and quadratic tetrahedral.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,25 +87,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used for
- extrapolating results available at
- gauss or quadrature points to nodal
- points.""",
+ document=r"""The mesh region in this pin is used for extrapolating results available at Gauss or quadrature points to nodal points.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Extrapolating results on the selected
- scoping. if it is a scoping
- container, the label must correspond
- to the one of the fields containers.""",
+ document=r"""Extrapolating results on the selected scoping. If it is a scoping container, the label must correspond to the one of the fields containers.""",
),
},
map_output_pin_spec={
@@ -114,14 +107,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -130,29 +123,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="gauss_to_node_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGaussToNodeFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGaussToNodeFc
+ inputs:
+ An instance of InputsGaussToNodeFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGaussToNodeFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGaussToNodeFc
+ outputs:
+ An instance of OutputsGaussToNodeFc.
"""
return super().outputs
@@ -183,12 +183,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -201,17 +202,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used for
- extrapolating results available at
- gauss or quadrature points to nodal
- points.
+ The mesh region in this pin is used for extrapolating results available at Gauss or quadrature points to nodal points.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,17 +223,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Extrapolating results on the selected
- scoping. if it is a scoping
- container, the label must correspond
- to the one of the fields containers.
+ Extrapolating results on the selected scoping. If it is a scoping container, the label must correspond to the one of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +262,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.gauss_to_node_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
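A short sketch of gauss_to_node_fc wired the same way as the docstring examples above; my_fields_container and my_meshed_region are assumed placeholders for objects produced upstream (for example by a result operator):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.gauss_to_node_fc()
>>> op.inputs.fields_container.connect(my_fields_container)  # results at Gauss/quadrature points
>>> op.inputs.mesh.connect(my_meshed_region)  # optional, used when the fields carry no support
>>> nodal_fc = op.outputs.fields_container()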
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_difference.py b/src/ansys/dpf/core/operators/averaging/nodal_difference.py
index f1a5b7cefcb..02bd4efb353 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_difference.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_difference.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_difference(Operator):
- """Transforms an Elemental Nodal field into a Nodal field. Each nodal
- value is the maximum difference between the unaveraged computed
- result for all elements that share this particular node. The
- result is computed on a given node's scoping.
+ r"""Transforms an Elemental Nodal field into a Nodal field. Each nodal value
+ is the maximum difference between the unaveraged computed result for all
+ elements that share this particular node. The result is computed on a
+ given node’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- Average only on these entities
- mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ average only on these entities
+ mesh: MeshedRegion, optional
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -70,12 +74,12 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Transforms an Elemental Nodal field into a Nodal field. Each nodal
- value is the maximum difference between the unaveraged
- computed result for all elements that share this
- particular node. The result is computed on a given node's
- scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms an Elemental Nodal field into a Nodal field. Each nodal value
+is the maximum difference between the unaveraged computed result for all
+elements that share this particular node. The result is computed on a
+given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,20 +87,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these entities""",
+ document=r"""average only on these entities""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -104,14 +107,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +123,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_difference", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalDifference:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalDifference
+ inputs:
+ An instance of InputsNodalDifference.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalDifference:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalDifference
+ outputs:
+ An instance of OutputsNodalDifference.
"""
return super().outputs
@@ -173,15 +183,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -194,14 +204,15 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Average only on these entities
+ average only on these entities
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,12 +225,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -250,18 +262,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_difference()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
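A sketch of nodal_difference following the same pattern; my_elemental_nodal_field and my_nodal_scoping are assumed placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_difference()
>>> op.inputs.field.connect(my_elemental_nodal_field)  # field or one-field fields container
>>> op.inputs.mesh_scoping.connect(my_nodal_scoping)  # optional: average only on these nodes
>>> difference_field = op.outputs.field()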
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py
index df0889ea05a..9da39aa47d0 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_difference_fc.py
@@ -4,37 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_difference_fc(Operator):
- """Transforms Elemental Nodal fields into Nodal fields. Each nodal value
- is the maximum difference between the unaveraged computed result
- for all elements that share this particular node. The result is
- computed on a given node scoping. If the input fields are mixed
- shell/solid, then the fields are split by element shape and the
- output fields container has an elshape label.
+ r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is
+ the maximum difference between the unaveraged computed result for all
+ elements that share this particular node. The result is computed on a
+ given node scoping. If the input fields are mixed shell/solid, then the
+ fields are split by element shape and the output fields container has an
+ elshape label.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- scoping : Scoping or ScopingsContainer, optional
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ scoping: Scoping or ScopingsContainer, optional
+ Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -76,14 +76,14 @@ def __init__(
self.inputs.scoping.connect(scoping)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields into Nodal fields. Each nodal value
- is the maximum difference between the unaveraged computed
- result for all elements that share this particular node.
- The result is computed on a given node scoping. If the
- input fields are mixed shell/solid, then the fields are
- split by element shape and the output fields container has
- an elshape label."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is
+the maximum difference between the unaveraged computed result for all
+elements that share this particular node. The result is computed on a
+given node scoping. If the input fields are mixed shell/solid, then the
+fields are split by element shape and the output fields container has an
+elshape label.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -91,24 +91,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+ document=r"""Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.""",
),
},
map_output_pin_spec={
@@ -116,14 +111,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_difference_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalDifferenceFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalDifferenceFc
+ inputs:
+ An instance of InputsNodalDifferenceFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalDifferenceFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalDifferenceFc
+ outputs:
+ An instance of OutputsNodalDifferenceFc.
"""
return super().outputs
@@ -187,12 +189,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,16 +208,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,17 +229,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,18 +270,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_difference_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
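The fields-container variant follows the same wiring; my_fields_container and my_nodes_scoping are assumed placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_difference_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.scoping.connect(my_nodes_scoping)  # optional: restrict the averaging to these nodes
>>> difference_fc = op.outputs.fields_container()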
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py
index d92b5819d9a..a214f81af00 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_fraction_fc.py
@@ -4,39 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_fraction_fc(Operator):
- """Transforms Elemental Nodal fields into Nodal fields. Each nodal value
- is the fraction between the nodal difference and the nodal
- average. The result is computed on a given node's scoping.
+ r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is
+ the fraction between the nodal difference and the nodal average. The
+ result is computed on a given node’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- scoping : Scoping, optional
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
- denominator : FieldsContainer, optional
- If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- elemental_nodal_to_nodal_fc.
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ scoping: Scoping, optional
+ Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.
+ denominator: FieldsContainer, optional
+ If a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_to_nodal_fc.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -89,10 +86,11 @@ def __init__(
self.inputs.denominator.connect(denominator)
@staticmethod
- def _spec():
- description = """Transforms Elemental Nodal fields into Nodal fields. Each nodal value
- is the fraction between the nodal difference and the nodal
- average. The result is computed on a given node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms Elemental Nodal fields into Nodal fields. Each nodal value is
+the fraction between the nodal difference and the nodal average. The
+result is computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -100,33 +98,25 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+ document=r"""Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.""",
),
6: PinSpecification(
name="denominator",
type_names=["fields_container"],
optional=True,
- document="""If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- elemental_nodal_to_nodal_fc.""",
+ document=r"""If a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_To_nodal_fc.""",
),
},
map_output_pin_spec={
@@ -134,14 +124,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -150,29 +140,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_fraction_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalFractionFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalFractionFc
+ inputs:
+ An instance of InputsNodalFractionFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalFractionFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalFractionFc
+ outputs:
+ An instance of OutputsNodalFractionFc.
"""
return super().outputs
@@ -209,12 +206,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._denominator)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,16 +225,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,17 +246,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these nodes. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ Average only on these nodes. If it is a scoping container, the label must correspond to the one of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -272,17 +267,15 @@ def scoping(self):
return self._scoping
@property
- def denominator(self):
- """Allows to connect denominator input to the operator.
+ def denominator(self) -> Input:
+ r"""Allows to connect denominator input to the operator.
- If a fields container is set in this pin, it
- is used as the denominator of the
- fraction instead of
- elemental_nodal_to_nodal_fc.
+ If a fields container is set in this pin, it is used as the denominator of the fraction instead of elemental_nodal_to_nodal_fc.
- Parameters
- ----------
- my_denominator : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -313,18 +306,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_fraction_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
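A sketch of nodal_fraction_fc; my_fields_container and my_custom_average_fc are assumed placeholders, the latter needed only when overriding the default denominator:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_fraction_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.denominator.connect(my_custom_average_fc)  # optional: replaces the default nodal average
>>> fraction_fc = op.outputs.fields_container()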
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py
index a85e597ef5e..448b7d0e8da 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental.py
@@ -4,31 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_to_elemental(Operator):
- """Transforms a Nodal field to an Elemental field, The result is computed
- on a given element's scoping.
+ r"""Transforms a Nodal field to an Elemental field, The result is computed
+ on a given element’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -75,9 +77,10 @@ def __init__(
self.inputs.collapse_shell_layers.connect(collapse_shell_layers)
@staticmethod
- def _spec():
- description = """Transforms a Nodal field to an Elemental field, The result is computed
- on a given element's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms a Nodal field to an Elemental field, The result is computed
+on a given element’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,22 +88,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
},
map_output_pin_spec={
@@ -108,14 +108,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_to_elemental", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalToElemental:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalToElemental
+ inputs:
+ An instance of InputsNodalToElemental.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalToElemental:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalToElemental
+ outputs:
+ An instance of OutputsNodalToElemental.
"""
return super().outputs
@@ -179,15 +186,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._collapse_shell_layers)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,12 +207,13 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -218,16 +226,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +265,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
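A sketch of nodal_to_elemental; my_nodal_field is an assumed placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental()
>>> op.inputs.field.connect(my_nodal_field)
>>> op.inputs.collapse_shell_layers.connect(True)  # also average across shell layers
>>> elemental_field = op.outputs.field()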
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py
index 1a41f684c7a..bfe5610613f 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_fc.py
@@ -4,53 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_to_elemental_fc(Operator):
- """Transforms Nodal fields into Elemental fields using an averaging
- process. The result is computed on a given element's scoping. If
- the input fields are mixed shell/solid, and the shell's layers are
- not specified as collapsed, then the fields are split by element
- shape and the output fields container has an elshape label.
+ r"""Transforms Nodal fields into Elemental fields using an averaging
+ process. The result is computed on a given element’s scoping. If the
+ input fields are mixed shell/solid, and the shell’s layers are not
+ specified as collapsed, then the fields are split by element shape and
+ the output fields container has an elshape label.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion or MeshesContainer, optional
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
- scoping : Scoping or ScopingsContainer, optional
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.
- shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
+ scoping: Scoping or ScopingsContainer, optional
+ Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.
+ shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -115,13 +104,13 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Transforms Nodal fields into Elemental fields using an averaging
- process. The result is computed on a given element's
- scoping. If the input fields are mixed shell/solid, and
- the shell's layers are not specified as collapsed, then
- the fields are split by element shape and the output
- fields container has an elshape label."""
+ def _spec() -> Specification:
+ description = r"""Transforms Nodal fields into Elemental fields using an averaging
+process. The result is computed on a given element’s scoping. If the
+input fields are mixed shell/solid, and the shell’s layers are not
+specified as collapsed, then the fields are split by element shape and
+the output fields container has an elshape label.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -129,53 +118,37 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.""",
+ document=r"""The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.""",
),
3: PinSpecification(
name="scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.""",
+ document=r"""Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).""",
),
},
map_output_pin_spec={
@@ -183,14 +156,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -199,29 +172,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_to_elemental_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalToElementalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalToElementalFc
+ inputs:
+ An instance of InputsNodalToElementalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalToElementalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalToElementalFc
+ outputs:
+ An instance of OutputsNodalToElementalFc.
"""
return super().outputs
@@ -272,12 +252,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -290,16 +271,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin is used to
- perform the averaging. it is used if
- there is no fields support.
+ The mesh region in this pin is used to perform the averaging. It is used if there is no fields support.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -312,17 +292,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Average only on these elements. if it is a
- scoping container, the label must
- correspond to the one of the fields
- containers.
+ Average only on these elements. If it is a scoping container, the label must correspond to the one of the fields containers.
- Parameters
- ----------
- my_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -335,16 +313,15 @@ def scoping(self):
return self._scoping
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -357,19 +334,15 @@ def collapse_shell_layers(self):
return self._collapse_shell_layers
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -382,18 +355,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
- Parameters
- ----------
- my_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -426,18 +396,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
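A sketch of nodal_to_elemental_fc exercising the shell-related pins documented above; my_nodal_fc is an assumed placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_fc()
>>> op.inputs.fields_container.connect(my_nodal_fc)
>>> op.inputs.merge_solid_shell.connect(True)  # group solids and shells in the same field
>>> op.inputs.shell_layer.connect(0)  # 0: Top; required here since collapse_shell_layers stays false
>>> elemental_fc = op.outputs.fields_container()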
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py
index 4311e7ec635..aa7856994ea 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal.py
@@ -4,31 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_to_elemental_nodal(Operator):
- """Transforms a Nodal field to an ElementalNodal field, The result is
- computed on a given element's scoping.
+ r"""Transforms a Nodal field to an ElementalNodal field, The result is
+ computed on a given element’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -75,9 +77,10 @@ def __init__(
self.inputs.collapse_shell_layers.connect(collapse_shell_layers)
@staticmethod
- def _spec():
- description = """Transforms a Nodal field to an ElementalNodal field, The result is
- computed on a given element's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms a Nodal field to an ElementalNodal field, The result is
+computed on a given element’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,22 +88,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
},
map_output_pin_spec={
@@ -108,14 +108,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="nodal_to_elemental_nodal", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalToElementalNodal:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalToElementalNodal
+ inputs:
+ An instance of InputsNodalToElementalNodal.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalToElementalNodal:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalToElementalNodal
+ outputs:
+ An instance of OutputsNodalToElementalNodal.
"""
return super().outputs
@@ -181,15 +188,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._collapse_shell_layers)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,12 +209,13 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -220,16 +228,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
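For reference, a minimal usage sketch of the averaging.nodal_to_elemental_nodal operator updated above, following the doctest style of its docstrings; ``my_field`` and ``my_scoping`` are hypothetical placeholders for an existing Field and element Scoping:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_nodal()
>>> op.inputs.field.connect(my_field)              # Nodal field to transform
>>> op.inputs.mesh_scoping.connect(my_scoping)     # optional: restrict to these elements
>>> op.inputs.collapse_shell_layers.connect(True)  # optional: also average across shell layers
>>> result_field = op.outputs.field()              # evaluates the operator and returns a Field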
diff --git a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py
index f41bc888eec..efbc344d618 100644
--- a/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/nodal_to_elemental_nodal_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class nodal_to_elemental_nodal_fc(Operator):
- """Transforms Nodal fields_container to Elemental Nodal fields_container.
+ r"""Transforms Nodal fields_container to Elemental Nodal fields_container.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- mesh_scoping : Scoping, optional
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ mesh_scoping: Scoping, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,10 +76,9 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = (
- """Transforms Nodal fields_container to Elemental Nodal fields_container."""
- )
+ def _spec() -> Specification:
+ description = r"""Transforms Nodal fields_container to Elemental Nodal fields_container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -82,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -102,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -118,31 +122,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="nodal_to_elemental_nodal_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodalToElementalNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodalToElementalNodalFc
+ inputs:
+ An instance of InputsNodalToElementalNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodalToElementalNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodalToElementalNodalFc
+ outputs:
+ An instance of OutputsNodalToElementalNodalFc.
"""
return super().outputs
@@ -177,12 +188,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,12 +207,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -213,12 +226,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +265,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
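The fields-container variant updated above follows the same pattern; a minimal sketch where ``my_fields_container`` and ``my_mesh`` are hypothetical placeholders for an existing FieldsContainer and MeshedRegion:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.nodal_to_elemental_nodal_fc()
>>> op.inputs.fields_container.connect(my_fields_container)  # Nodal fields to transform
>>> op.inputs.mesh.connect(my_mesh)                          # optional meshed region support
>>> result_fc = op.outputs.fields_container()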
diff --git a/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py b/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py
index b4e3cc036c6..f8e54f41d67 100644
--- a/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/to_elemental_fc.py
@@ -4,48 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class to_elemental_fc(Operator):
- """Transforms Input fields into Elemental fields using an averaging
- process. The result is computed on a given element's scoping.
+ r"""Transforms Input fields into Elemental fields using an averaging
+ process. The result is computed on a given element’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- mesh_scoping : Scoping, optional
- smoothen_values : bool, optional
- If it is set to true, elemental nodal fields
- are first averaged on nodes and then
- averaged on elements (default is
- false).
- collapse_shell_layers : bool, optional
- If true, the data across different shell
- layers is averaged as well (default
- is false).
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.
- shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ mesh_scoping: Scoping, optional
+ smoothen_values: bool, optional
+ if it is set to true, Elemental Nodal fields are first averaged on nodes and then averaged on elements (default is false).
+ collapse_shell_layers: bool, optional
+ If true, the data across different shell layers is averaged as well (default is false).
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.
+ shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -116,10 +107,10 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Transforms Input fields into Elemental fields using an averaging
- process. The result is computed on a given element's
- scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms Input fields into Elemental fields using an averaging
+process. The result is computed on a given element’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -127,57 +118,43 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="smoothen_values",
type_names=["bool"],
optional=True,
- document="""If it is set to true, elemental nodal fields
- are first averaged on nodes and then
- averaged on elements (default is
- false).""",
+ document=r"""if it is set to true, Elemental Nodal fields are first averaged on nodes and then averaged on elements (default is false).""",
),
10: PinSpecification(
name="collapse_shell_layers",
type_names=["bool"],
optional=True,
- document="""If true, the data across different shell
- layers is averaged as well (default
- is false).""",
+ document=r"""If true, the data across different shell layers is averaged as well (default is false).""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).""",
),
},
map_output_pin_spec={
@@ -185,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -201,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="to_elemental_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsToElementalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsToElementalFc
+ inputs:
+ An instance of InputsToElementalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsToElementalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsToElementalFc
+ outputs:
+ An instance of OutputsToElementalFc.
"""
return super().outputs
@@ -274,12 +258,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -292,12 +277,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -310,12 +296,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -328,17 +315,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def smoothen_values(self):
- """Allows to connect smoothen_values input to the operator.
+ def smoothen_values(self) -> Input:
+ r"""Allows to connect smoothen_values input to the operator.
- If it is set to true, elemental nodal fields
- are first averaged on nodes and then
- averaged on elements (default is
- false).
+ if it is set to true, Elemental Nodal fields are first averaged on nodes and then averaged on elements (default is false).
- Parameters
- ----------
- my_smoothen_values : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -351,16 +336,15 @@ def smoothen_values(self):
return self._smoothen_values
@property
- def collapse_shell_layers(self):
- """Allows to connect collapse_shell_layers input to the operator.
+ def collapse_shell_layers(self) -> Input:
+ r"""Allows to connect collapse_shell_layers input to the operator.
- If true, the data across different shell
- layers is averaged as well (default
- is false).
+ If true, the data across different shell layers is averaged as well (default is false).
- Parameters
- ----------
- my_collapse_shell_layers : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,19 +357,15 @@ def collapse_shell_layers(self):
return self._collapse_shell_layers
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true and collapse_shell_layers is
- false, a shell_layer needs to be
- specified.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true and collapse_shell_layers is false, a shell_layer needs to be specified.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -398,18 +378,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
- Parameters
- ----------
- my_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,18 +417,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_elemental_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
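A minimal sketch of the to_elemental_fc operator updated above, exercising its averaging-related pins; ``my_fields_container`` is a hypothetical FieldsContainer and the boolean values are illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_elemental_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.smoothen_values.connect(True)        # average on nodes first, then on elements
>>> op.inputs.collapse_shell_layers.connect(True)  # also average across shell layers
>>> result_fc = op.outputs.fields_container()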
diff --git a/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py
index 7005322325d..ef0a6f712de 100644
--- a/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/to_elemental_nodal_fc.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class to_elemental_nodal_fc(Operator):
- """Transforms fields into Elemental Nodal fields using an averaging
- process. The result is computed on a given element's scoping.
+ r"""Transforms fields into Elemental Nodal fields using an averaging
+ process. The result is computed on a given element’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh_scoping : Scoping, optional
- mesh : MeshedRegion, optional
+ fields_container: FieldsContainer
+ mesh_scoping: Scoping, optional
+ mesh: MeshedRegion, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -70,10 +75,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Transforms fields into Elemental Nodal fields using an averaging
- process. The result is computed on a given element's
- scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms fields into Elemental Nodal fields using an averaging
+process. The result is computed on a given element’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -101,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -117,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="to_elemental_nodal_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsToElementalNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsToElementalNodalFc
+ inputs:
+ An instance of InputsToElementalNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsToElementalNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsToElementalNodalFc
+ outputs:
+ An instance of OutputsToElementalNodalFc.
"""
return super().outputs
@@ -174,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -192,12 +205,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,12 +224,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -248,18 +263,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_elemental_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
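The to_elemental_nodal_fc operator updated above works the same way; note that mesh_scoping is pin 1 and mesh is pin 7 for this operator. ``my_fields_container`` and ``my_scoping`` are hypothetical placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_elemental_nodal_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.mesh_scoping.connect(my_scoping)  # optional element scoping
>>> result_fc = op.outputs.fields_container()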
diff --git a/src/ansys/dpf/core/operators/averaging/to_nodal.py b/src/ansys/dpf/core/operators/averaging/to_nodal.py
index 9f09361b959..a400db4906a 100644
--- a/src/ansys/dpf/core/operators/averaging/to_nodal.py
+++ b/src/ansys/dpf/core/operators/averaging/to_nodal.py
@@ -4,39 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class to_nodal(Operator):
- """Transforms a field into a Nodal field using an averaging process. The
- result is computed on a given node's scoping.
+ r"""Transforms a field into a Nodal field using an averaging process. The
+ result is computed on a given node’s scoping.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh_scoping : Scoping, optional
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
- shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh_scoping: Scoping, optional
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
+ shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -89,9 +85,10 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Transforms a field into a Nodal field using an averaging process. The
- result is computed on a given node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms a field into a Nodal field using an averaging process. The
+result is computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -99,34 +96,25 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).""",
),
},
map_output_pin_spec={
@@ -134,14 +122,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -150,29 +138,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="to_nodal", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsToNodal:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsToNodal
+ inputs:
+ An instance of InputsToNodal.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsToNodal:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsToNodal
+ outputs:
+ An instance of OutputsToNodal.
"""
return super().outputs
@@ -207,15 +202,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,12 +223,13 @@ def field(self):
return self._field
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -246,18 +242,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,18 +263,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
- Parameters
- ----------
- my_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -312,18 +302,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_nodal()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
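A sketch of the to_nodal operator updated above, illustrating the merge_solid_shell/shell_layer coupling described in its docstring (when merge_solid_shell is true, shell_layer must select a single layer); ``my_field`` is a hypothetical Field:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_nodal()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.merge_solid_shell.connect(True)  # group solids and shells in one field
>>> op.inputs.shell_layer.connect(0)           # 0: Top -- a single layer is required here
>>> result_field = op.outputs.field()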
diff --git a/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py b/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py
index 09c6dd10150..cc998dd971c 100644
--- a/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py
+++ b/src/ansys/dpf/core/operators/averaging/to_nodal_fc.py
@@ -4,38 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class to_nodal_fc(Operator):
- """Transforms fields into Nodal fields using an averaging process. The
- result is computed on a given node's scoping.
+ r"""Transforms fields into Nodal fields using an averaging process. The
+ result is computed on a given node’s scoping.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh : MeshedRegion, optional
- mesh_scoping : Scoping, optional
- merge_solid_shell : bool, optional
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
- shell_layer : int, optional
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ fields_container: FieldsContainer
+ mesh: MeshedRegion, optional
+ mesh_scoping: Scoping, optional
+ merge_solid_shell: bool, optional
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
+ shell_layer: int, optional
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -94,9 +91,10 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Transforms fields into Nodal fields using an averaging process. The
- result is computed on a given node's scoping."""
+ def _spec() -> Specification:
+ description = r"""Transforms fields into Nodal fields using an averaging process. The
+result is computed on a given node’s scoping.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -104,39 +102,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
26: PinSpecification(
name="merge_solid_shell",
type_names=["bool"],
optional=True,
- document="""For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.""",
+ document=r"""For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).""",
+ document=r"""0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).""",
),
},
map_output_pin_spec={
@@ -144,14 +134,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -160,29 +150,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="to_nodal_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsToNodalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsToNodalFc
+ inputs:
+ An instance of InputsToNodalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsToNodalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsToNodalFc
+ outputs:
+ An instance of OutputsToNodalFc.
"""
return super().outputs
@@ -221,12 +218,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -239,12 +237,13 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -257,12 +256,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -275,18 +275,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def merge_solid_shell(self):
- """Allows to connect merge_solid_shell input to the operator.
+ def merge_solid_shell(self) -> Input:
+ r"""Allows to connect merge_solid_shell input to the operator.
- For shell/solid mixed fields, group in the
- same field all solids and shells
- (false by default). if this pin is
- true, a shell_layer needs to be
- specified.
+ For shell/solid mixed fields, group in the same field all solids and shells (false by default). If this pin is true, a shell_layer needs to be specified.
- Parameters
- ----------
- my_merge_solid_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,18 +296,15 @@ def merge_solid_shell(self):
return self._merge_solid_shell
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- 0: top, 1: bottom, 2: bottomtop, 3: mid, 4:
- bottomtopmid. if merge_solid_shell is
- true, this pin needs to be specified
- to a value that extracts only one
- layer (top, bottom or mid).
+ 0: Top, 1: Bottom, 2: BottomTop, 3: Mid, 4: BottomTopMid. If merge_solid_shell is true, this pin needs to be specified to a value that extracts only one layer (Top, Bottom or Mid).
- Parameters
- ----------
- my_shell_layer : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -341,18 +335,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_nodal_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
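And the fields-container counterpart updated above, restricted to a node scoping; ``my_fields_container`` and ``my_node_scoping`` are hypothetical placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.averaging.to_nodal_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.mesh_scoping.connect(my_node_scoping)  # optional: compute only on these nodes
>>> result_fc = op.outputs.fields_container()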
diff --git a/src/ansys/dpf/core/operators/compression/apply_svd.py b/src/ansys/dpf/core/operators/compression/apply_svd.py
index 36c3ca8b67a..d491d82daa9 100644
--- a/src/ansys/dpf/core/operators/compression/apply_svd.py
+++ b/src/ansys/dpf/core/operators/compression/apply_svd.py
@@ -4,52 +4,41 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class apply_svd(Operator):
- """Computes the coefficients (=U*Sigma) and VT components from SVD.
+ r"""Computes the coefficients (=U*Sigma) and VT components from SVD.
+
Parameters
----------
- field_contaner_to_compress : FieldsContainer
- Fields container to be compressed
- scalar_int : int
- Number of vectors (r) to keep for the future
- reconstraction of the matrix a, ex.
- a[m,n]=coef[m,r]*vt[r,n], where
- coef=u*sigma
- scalar_double : float
- Threshold (precision) as a double, default
- value is 1e-7
- boolean : bool
- Apply svd on the initial input data (true) or
- transposed (square matrix), default
- value is false
+ field_contaner_to_compress: FieldsContainer
+ fields container to be compressed
+ scalar_int: int
+        number of vectors (r) to keep for the future reconstruction of the matrix A, ex. A[m,n]=coef[m,r]*VT[r,n], where coef=U*Sigma
+ scalar_double: float
+ threshold (precision) as a double, default value is 1e-7
+ boolean: bool
+ apply svd on the initial input data (true) or transposed (square matrix), default value is false
Returns
-------
- us_svd : FieldsContainer
- The output entity is a field container (time
- dependant); it contains the
- multiplication of two matrices, u and
- s, where a=u.s.vt
- vt_svd : FieldsContainer
- The output entity is a field container (space
- dependant), containing the vt, where
- a=u.s.vt
- sigma : Field or FieldsContainer
- The output entity is a field (or a field
- container if input fc contains
- several labels, where field contains
- results per label), containing
- singular (s) values of the input
- data, where a=u.s.vt
+ us_svd: FieldsContainer
+        the output entity is a field container (time dependent); it contains the multiplication of two matrices, U and S, where A=U.S.Vt
+    vt_svd: FieldsContainer
+        the output entity is a field container (space dependent), containing the Vt, where A=U.S.Vt
+ sigma: Field or FieldsContainer
+ the output entity is a field (or a field container if input fc contains several labels, where field contains results per label), containing singular (S) values of the input data, where A=U.S.Vt
Examples
--------
@@ -104,10 +93,9 @@ def __init__(
self.inputs.boolean.connect(boolean)
@staticmethod
- def _spec():
- description = (
- """Computes the coefficients (=U*Sigma) and VT components from SVD."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes the coefficients (=U*Sigma) and VT components from SVD.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -115,31 +103,25 @@ def _spec():
name="field_contaner_to_compress",
type_names=["fields_container"],
optional=False,
- document="""Fields container to be compressed""",
+ document=r"""fields container to be compressed""",
),
1: PinSpecification(
name="scalar_int",
type_names=["int32"],
optional=False,
- document="""Number of vectors (r) to keep for the future
- reconstraction of the matrix a, ex.
- a[m,n]=coef[m,r]*vt[r,n], where
- coef=u*sigma""",
+                    document=r"""number of vectors (r) to keep for the future reconstruction of the matrix A, ex. A[m,n]=coef[m,r]*VT[r,n], where coef=U*Sigma""",
),
2: PinSpecification(
name="scalar_double",
type_names=["double"],
optional=False,
- document="""Threshold (precision) as a double, default
- value is 1e-7""",
+ document=r"""threshold (precision) as a double, default value is 1e-7""",
),
3: PinSpecification(
name="boolean",
type_names=["bool"],
optional=False,
- document="""Apply svd on the initial input data (true) or
- transposed (square matrix), default
- value is false""",
+ document=r"""apply svd on the initial input data (true) or transposed (square matrix), default value is false""",
),
},
map_output_pin_spec={
@@ -147,36 +129,26 @@ def _spec():
name="us_svd",
type_names=["fields_container"],
optional=False,
- document="""The output entity is a field container (time
- dependant); it contains the
- multiplication of two matrices, u and
- s, where a=u.s.vt""",
+                    document=r"""the output entity is a field container (time dependent); it contains the multiplication of two matrices, U and S, where A=U.S.Vt""",
),
1: PinSpecification(
name="vt_svd",
type_names=["fields_container"],
optional=False,
- document="""The output entity is a field container (space
- dependant), containing the vt, where
- a=u.s.vt""",
+                    document=r"""the output entity is a field container (space dependent), containing the Vt, where A=U.S.Vt""",
),
2: PinSpecification(
name="sigma",
type_names=["field", "fields_container"],
optional=False,
- document="""The output entity is a field (or a field
- container if input fc contains
- several labels, where field contains
- results per label), containing
- singular (s) values of the input
- data, where a=u.s.vt""",
+ document=r"""the output entity is a field (or a field container if input fc contains several labels, where field contains results per label), containing singular (S) values of the input data, where A=U.S.Vt""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -185,29 +157,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="svd_operator", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsApplySvd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsApplySvd
+ inputs:
+ An instance of InputsApplySvd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsApplySvd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsApplySvd
+ outputs:
+ An instance of OutputsApplySvd.
"""
return super().outputs
@@ -244,14 +223,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._boolean)
@property
- def field_contaner_to_compress(self):
- """Allows to connect field_contaner_to_compress input to the operator.
+ def field_contaner_to_compress(self) -> Input:
+ r"""Allows to connect field_contaner_to_compress input to the operator.
- Fields container to be compressed
+ fields container to be compressed
- Parameters
- ----------
- my_field_contaner_to_compress : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -264,17 +244,15 @@ def field_contaner_to_compress(self):
return self._field_contaner_to_compress
@property
- def scalar_int(self):
- """Allows to connect scalar_int input to the operator.
+ def scalar_int(self) -> Input:
+ r"""Allows to connect scalar_int input to the operator.
- Number of vectors (r) to keep for the future
- reconstraction of the matrix a, ex.
- a[m,n]=coef[m,r]*vt[r,n], where
- coef=u*sigma
+        number of vectors (r) to keep for the future reconstruction of the matrix A, ex. A[m,n]=coef[m,r]*VT[r,n], where coef=U*Sigma
- Parameters
- ----------
- my_scalar_int : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -287,15 +265,15 @@ def scalar_int(self):
return self._scalar_int
@property
- def scalar_double(self):
- """Allows to connect scalar_double input to the operator.
+ def scalar_double(self) -> Input:
+ r"""Allows to connect scalar_double input to the operator.
- Threshold (precision) as a double, default
- value is 1e-7
+ threshold (precision) as a double, default value is 1e-7
- Parameters
- ----------
- my_scalar_double : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -308,16 +286,15 @@ def scalar_double(self):
return self._scalar_double
@property
- def boolean(self):
- """Allows to connect boolean input to the operator.
+ def boolean(self) -> Input:
+ r"""Allows to connect boolean input to the operator.
- Apply svd on the initial input data (true) or
- transposed (square matrix), default
- value is false
+ apply svd on the initial input data (true) or transposed (square matrix), default value is false
- Parameters
- ----------
- my_boolean : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -366,35 +343,41 @@ def __init__(self, op: Operator):
self._outputs.append(self.sigma_as_fields_container)
@property
- def us_svd(self):
- """Allows to get us_svd output of the operator
+ def us_svd(self) -> Output:
+ r"""Allows to get us_svd output of the operator
+
+        the output entity is a field container (time dependent); it contains the multiplication of two matrices, U and S, where A=U.S.Vt
Returns
- ----------
- my_us_svd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_svd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_us_svd = op.outputs.us_svd()
- """ # noqa: E501
+ """
return self._us_svd
@property
- def vt_svd(self):
- """Allows to get vt_svd output of the operator
+ def vt_svd(self) -> Output:
+ r"""Allows to get vt_svd output of the operator
+
+        the output entity is a field container (space dependent), containing the Vt, where A=U.S.Vt
Returns
- ----------
- my_vt_svd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_svd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_vt_svd = op.outputs.vt_svd()
- """ # noqa: E501
+ """
return self._vt_svd
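A minimal sketch of the compression.apply_svd operator updated above, using its four documented input pins and the two fields-container outputs; ``my_fields_container`` is a hypothetical FieldsContainer and the scalar values are illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_svd()
>>> op.inputs.field_contaner_to_compress.connect(my_fields_container)
>>> op.inputs.scalar_int.connect(10)       # keep 10 singular vectors for reconstruction
>>> op.inputs.scalar_double.connect(1e-7)  # precision threshold
>>> op.inputs.boolean.connect(False)       # compress the data as-is (not transposed)
>>> us_svd = op.outputs.us_svd()           # U*Sigma coefficients (time dependent)
>>> vt_svd = op.outputs.vt_svd()           # Vt component (space dependent)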
diff --git a/src/ansys/dpf/core/operators/compression/apply_zfp.py b/src/ansys/dpf/core/operators/compression/apply_zfp.py
index fdbb6766534..8ad8a7a1db5 100644
--- a/src/ansys/dpf/core/operators/compression/apply_zfp.py
+++ b/src/ansys/dpf/core/operators/compression/apply_zfp.py
@@ -4,71 +4,46 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class apply_zfp(Operator):
- """Compressing input data using one of zfp compression algorithm modes.
+ r"""Compressing input data using one of zfp compression algorithm modes.
+
Parameters
----------
- dataIn : Field or FieldsContainer
- Field or fields container to be compressed
- mode : str or Char
- Zfp mode: fixed-rate ('r'), fixed-precision
- ('p'), fixed-accuracy ('a')
- mode_parameter : int or float
- Mode-corresponding parameter: rate (double) /
- precision (int) / accuracy (double)
- dim : int, optional
- Dimension (1d/2d/3d) for data organization
- before the compression (int; default:
- 2)
- order : int, optional
- Xyz dimensions order, where x (row)
- corresponds to number of elementary
- data, y (col) - number of time steps,
- z - number of components (applicable
- only for 3d data) : 0=xyz, 1=yxz
- (int; default: 0)
- double_absthreshold : float, optional
- Double positive small value. all the values
- smaller than max(small value, max(vi)
- * relative threshold) are considered
- as zero values, (default value:
- 1.0e-18).
- double_relthreshold : float, optional
- Double relative threshold. values smaller
- than (v1 - v2) < max(small value, v1
- * relativetol) are considered
- identical (default value: 1.0e-10).
+ dataIn: Field or FieldsContainer
+ field or fields container to be compressed
+ mode: str or Char
+ zfp mode: fixed-rate ('r'), fixed-precision ('p'), fixed-accuracy ('a')
+ mode_parameter: int or float
+ mode-corresponding parameter: rate (double) / precision (int) / accuracy (double)
+ dim: int, optional
+ dimension (1D/2D/3D) for data organization before the compression (int; default: 2)
+ order: int, optional
+ xyz dimensions order, where x (row) corresponds to number of elementary data, y (col) - number of time steps, z - number of components (applicable only for 3d data) : 0=xyz, 1=yxz (int; default: 0)
+ double_absthreshold: float, optional
+ Double positive small value. All the values smaller than max(small value, max(vi) * relative threshold) are considered as zero values, (default value: 1.0e-18).
+ double_relthreshold: float, optional
+ Double relative threshold. Values smaller than (v1 - v2) < max(small value, v1 * relativeTol) are considered identical (default value: 1.0e-10).
Returns
-------
- compress_speed : float
- The output entity is a double, containing
- compression speed of the input data:
- for elementalnodal location -
- [elements/sec], for nodal location -
- [nodes/sec]
- compress_ratio : float
- The output entity is a double, containing
- compression rate = initial/compressed
- dataOut : CustomTypeFieldsContainer
- The output entity is a 'custom type field
- container'; each
- output field containing compressed
- results corresponding to one
- component data (ie. input vector
- field/fc contains 3 components will
- give 3 output fields), this is not
- the case when input pin3 is set to 3,
- all components will be compressed
- into one field.
+ compress_speed: float
+ the output entity is a double, containing compression speed of the input data: for ElementalNodal location - [elements/sec], for Nodal location - [nodes/sec]
+ compress_ratio: float
+ the output entity is a double, containing compression rate = initial/compressed
+ dataOut: CustomTypeFieldsContainer
+ the output entity is a 'custom type field container'; each output field containing compressed results corresponding to one component data (ie. input vector field/fc contains 3 components will give 3 output fields), this is not the case when input pin3 is set to 3, all components will be compressed into one field.
Examples
--------
@@ -141,10 +116,9 @@ def __init__(
self.inputs.double_relthreshold.connect(double_relthreshold)
@staticmethod
- def _spec():
- description = (
- """Compressing input data using one of zfp compression algorithm modes."""
- )
+ def _spec() -> Specification:
+ description = r"""Compressing input data using one of zfp compression algorithm modes.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -152,59 +126,43 @@ def _spec():
name="dataIn",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container to be compressed""",
+ document=r"""field or fields container to be compressed""",
),
1: PinSpecification(
name="mode",
type_names=["string", "char"],
optional=False,
- document="""Zfp mode: fixed-rate ('r'), fixed-precision
- ('p'), fixed-accuracy ('a')""",
+ document=r"""zfp mode: fixed-rate ('r'), fixed-precision ('p'), fixed-accuracy ('a')""",
),
2: PinSpecification(
name="mode_parameter",
type_names=["int32", "double"],
optional=False,
- document="""Mode-corresponding parameter: rate (double) /
- precision (int) / accuracy (double)""",
+ document=r"""mode-corresponding parameter: rate (double) / precision (int) / accuracy (double)""",
),
3: PinSpecification(
name="dim",
type_names=["int32"],
optional=True,
- document="""Dimension (1d/2d/3d) for data organization
- before the compression (int; default:
- 2)""",
+ document=r"""dimension (1D/2D/3D) for data organization before the compression (int; default: 2)""",
),
4: PinSpecification(
name="order",
type_names=["int32"],
optional=True,
- document="""Xyz dimensions order, where x (row)
- corresponds to number of elementary
- data, y (col) - number of time steps,
- z - number of components (applicable
- only for 3d data) : 0=xyz, 1=yxz
- (int; default: 0)""",
+ document=r"""xyz dimensions order, where x (row) corresponds to number of elementary data, y (col) - number of time steps, z - number of components (applicable only for 3d data) : 0=xyz, 1=yxz (int; default: 0)""",
),
5: PinSpecification(
name="double_absthreshold",
type_names=["double"],
optional=True,
- document="""Double positive small value. all the values
- smaller than max(small value, max(vi)
- * relative threshold) are considered
- as zero values, (default value:
- 1.0e-18).""",
+ document=r"""Double positive small value. All the values smaller than max(small value, max(vi) * relative threshold) are considered as zero values, (default value: 1.0e-18).""",
),
6: PinSpecification(
name="double_relthreshold",
type_names=["double"],
optional=True,
- document="""Double relative threshold. values smaller
- than (v1 - v2) < max(small value, v1
- * relativetol) are considered
- identical (default value: 1.0e-10).""",
+ document=r"""Double relative threshold. Values smaller than (v1 - v2) < max(small value, v1 * relativeTol) are considered identical (default value: 1.0e-10).""",
),
},
map_output_pin_spec={
@@ -212,40 +170,26 @@ def _spec():
name="compress_speed",
type_names=["double"],
optional=False,
- document="""The output entity is a double, containing
- compression speed of the input data:
- for elementalnodal location -
- [elements/sec], for nodal location -
- [nodes/sec]""",
+ document=r"""the output entity is a double, containing compression speed of the input data: for ElementalNodal location - [elements/sec], for Nodal location - [nodes/sec]""",
),
1: PinSpecification(
name="compress_ratio",
type_names=["double"],
optional=False,
- document="""The output entity is a double, containing
- compression rate = initial/compressed""",
+ document=r"""the output entity is a double, containing compression rate = initial/compressed""",
),
2: PinSpecification(
name="dataOut",
type_names=["custom_type_fields_container"],
optional=False,
- document="""The output entity is a 'custom type field
- container'; each
- output field containing compressed
- results corresponding to one
- component data (ie. input vector
- field/fc contains 3 components will
- give 3 output fields), this is not
- the case when input pin3 is set to 3,
- all components will be compressed
- into one field.""",
+ document=r"""the output entity is a 'custom type field container'; each output field containing compressed results corresponding to one component data (ie. input vector field/fc contains 3 components will give 3 output fields), this is not the case when input pin3 is set to 3, all components will be compressed into one field.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -254,29 +198,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="zfp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsApplyZfp:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsApplyZfp
+ inputs:
+ An instance of InputsApplyZfp.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsApplyZfp:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsApplyZfp
+ outputs:
+ An instance of OutputsApplyZfp.
"""
return super().outputs
@@ -323,14 +274,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._double_relthreshold)
@property
- def dataIn(self):
- """Allows to connect dataIn input to the operator.
+ def dataIn(self) -> Input:
+ r"""Allows to connect dataIn input to the operator.
- Field or fields container to be compressed
+ field or fields container to be compressed
- Parameters
- ----------
- my_dataIn : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,15 +295,15 @@ def dataIn(self):
return self._dataIn
@property
- def mode(self):
- """Allows to connect mode input to the operator.
+ def mode(self) -> Input:
+ r"""Allows to connect mode input to the operator.
- Zfp mode: fixed-rate ('r'), fixed-precision
- ('p'), fixed-accuracy ('a')
+ zfp mode: fixed-rate ('r'), fixed-precision ('p'), fixed-accuracy ('a')
- Parameters
- ----------
- my_mode : str or Char
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -364,15 +316,15 @@ def mode(self):
return self._mode
@property
- def mode_parameter(self):
- """Allows to connect mode_parameter input to the operator.
+ def mode_parameter(self) -> Input:
+ r"""Allows to connect mode_parameter input to the operator.
- Mode-corresponding parameter: rate (double) /
- precision (int) / accuracy (double)
+ mode-corresponding parameter: rate (double) / precision (int) / accuracy (double)
- Parameters
- ----------
- my_mode_parameter : int or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,16 +337,15 @@ def mode_parameter(self):
return self._mode_parameter
@property
- def dim(self):
- """Allows to connect dim input to the operator.
+ def dim(self) -> Input:
+ r"""Allows to connect dim input to the operator.
- Dimension (1d/2d/3d) for data organization
- before the compression (int; default:
- 2)
+ dimension (1D/2D/3D) for data organization before the compression (int; default: 2)
- Parameters
- ----------
- my_dim : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -407,19 +358,15 @@ def dim(self):
return self._dim
@property
- def order(self):
- """Allows to connect order input to the operator.
+ def order(self) -> Input:
+ r"""Allows to connect order input to the operator.
- Xyz dimensions order, where x (row)
- corresponds to number of elementary
- data, y (col) - number of time steps,
- z - number of components (applicable
- only for 3d data) : 0=xyz, 1=yxz
- (int; default: 0)
+ xyz dimensions order, where x (row) corresponds to number of elementary data, y (col) - number of time steps, z - number of components (applicable only for 3d data) : 0=xyz, 1=yxz (int; default: 0)
- Parameters
- ----------
- my_order : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -432,18 +379,15 @@ def order(self):
return self._order
@property
- def double_absthreshold(self):
- """Allows to connect double_absthreshold input to the operator.
+ def double_absthreshold(self) -> Input:
+ r"""Allows to connect double_absthreshold input to the operator.
- Double positive small value. all the values
- smaller than max(small value, max(vi)
- * relative threshold) are considered
- as zero values, (default value:
- 1.0e-18).
+ Double positive small value. All the values smaller than max(small value, max(vi) * relative threshold) are considered as zero values (default value: 1.0e-18).
- Parameters
- ----------
- my_double_absthreshold : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -456,17 +400,15 @@ def double_absthreshold(self):
return self._double_absthreshold
@property
- def double_relthreshold(self):
- """Allows to connect double_relthreshold input to the operator.
+ def double_relthreshold(self) -> Input:
+ r"""Allows to connect double_relthreshold input to the operator.
- Double relative threshold. values smaller
- than (v1 - v2) < max(small value, v1
- * relativetol) are considered
- identical (default value: 1.0e-10).
+ Double relative threshold. Values for which (v1 - v2) < max(small value, v1 * relativeTol) are considered identical (default value: 1.0e-10).
- Parameters
- ----------
- my_double_relthreshold : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -503,52 +445,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._dataOut)
@property
- def compress_speed(self):
- """Allows to get compress_speed output of the operator
+ def compress_speed(self) -> Output:
+ r"""Allows to get compress_speed output of the operator
+
+ the output entity is a double, containing compression speed of the input data: for ElementalNodal location - [elements/sec], for Nodal location - [nodes/sec]
Returns
- ----------
- my_compress_speed : float
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_zfp()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_compress_speed = op.outputs.compress_speed()
- """ # noqa: E501
+ """
return self._compress_speed
@property
- def compress_ratio(self):
- """Allows to get compress_ratio output of the operator
+ def compress_ratio(self) -> Output:
+ r"""Allows to get compress_ratio output of the operator
+
+ the output entity is a double, containing compression rate = initial/compressed
Returns
- ----------
- my_compress_ratio : float
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_zfp()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_compress_ratio = op.outputs.compress_ratio()
- """ # noqa: E501
+ """
return self._compress_ratio
@property
- def dataOut(self):
- """Allows to get dataOut output of the operator
+ def dataOut(self) -> Output:
+ r"""Allows to get dataOut output of the operator
+
+ the output entity is a 'custom type field container'; each output field contains compressed results corresponding to one component of the data (i.e. an input vector field/fc with 3 components gives 3 output fields); this is not the case when input pin 3 (dim) is set to 3, in which case all components are compressed into one field.
Returns
- ----------
- my_dataOut : CustomTypeFieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.apply_zfp()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dataOut = op.outputs.dataOut()
- """ # noqa: E501
+ """
return self._dataOut
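
A minimal usage sketch of the apply_zfp operator documented above, wired according to its pin specification. It assumes a running DPF server; the static_rst example result and the displacement field used as input are placeholders only.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input data: a displacement fields container from an example result.
model = dpf.Model(examples.find_static_rst())
disp = model.results.displacement().eval()

# Compress with zfp in fixed-precision mode ('p'), precision 16,
# organizing the data as 2D before compression (pin 3, the default).
op = dpf.operators.compression.apply_zfp()
op.inputs.dataIn.connect(disp)
op.inputs.mode.connect("p")
op.inputs.mode_parameter.connect(16)
op.inputs.dim.connect(2)

compressed = op.outputs.dataOut()     # custom type fields container
ratio = op.outputs.compress_ratio()   # initial size / compressed size
speed = op.outputs.compress_speed()   # nodes/sec for Nodal data
print(f"compression ratio: {ratio}, speed: {speed}")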
diff --git a/src/ansys/dpf/core/operators/compression/kmeans_clustering.py b/src/ansys/dpf/core/operators/compression/kmeans_clustering.py
index 16f228e749d..40d4d93aff2 100644
--- a/src/ansys/dpf/core/operators/compression/kmeans_clustering.py
+++ b/src/ansys/dpf/core/operators/compression/kmeans_clustering.py
@@ -4,38 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class kmeans_clustering(Operator):
- """Apply kMeans clustering to group data depending on the data's non-
- linearity.
+ r"""Apply kMeans clustering to group data depending on the data’s
+ non-linearity.
+
Parameters
----------
- clusters_number : int, optional
- Number of the clusters (dafault is 3)
- formula : str, optional
- Formula ('dist'/'dotprod'), default is 'dist'
- fields_container : FieldsContainer
- An iunput fields container containing the
- data which will be used for the
- clustering
- component_number : int, optional
- Component number as an int (default is 0), ex
- '0' for x-displacement, '1' for
- y-displacement,...
+ clusters_number: int, optional
+ number of the clusters (default is 3)
+ formula: str, optional
+ formula ('dist'/'dotprod'), default is 'dist'
+ fields_container: FieldsContainer
+ an input fields container containing the data which will be used for the clustering
+ component_number: int, optional
+ component number as an int (default is 0), e.g. '0' for X-displacement, '1' for Y-displacement, ...
Returns
-------
- scoping_clusters : ScopingsContainer
- Scopings container with the space scoping
- (entities' ids) corresponding to each
- of k-clusters
+ scoping_clusters: ScopingsContainer
+ Scopings container with the space scoping (entities' ids) corresponding to each of k-clusters
Examples
--------
@@ -88,9 +87,10 @@ def __init__(
self.inputs.component_number.connect(component_number)
@staticmethod
- def _spec():
- description = """Apply kMeans clustering to group data depending on the data's non-
- linearity."""
+ def _spec() -> Specification:
+ description = r"""Apply kMeans clustering to group data depending on the data’s
+non-linearity.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -98,29 +98,25 @@ def _spec():
name="clusters_number",
type_names=["int32"],
optional=True,
- document="""Number of the clusters (dafault is 3)""",
+ document=r"""number of the clusters (dafault is 3)""",
),
1: PinSpecification(
name="formula",
type_names=["string"],
optional=True,
- document="""Formula ('dist'/'dotprod'), default is 'dist'""",
+ document=r"""formula ('dist'/'dotprod'), default is 'dist'""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""An iunput fields container containing the
- data which will be used for the
- clustering""",
+ document=r"""an iunput fields container containing the data which will be used for the clustering""",
),
3: PinSpecification(
name="component_number",
type_names=["int32"],
optional=True,
- document="""Component number as an int (default is 0), ex
- '0' for x-displacement, '1' for
- y-displacement,...""",
+ document=r"""component number as an int (default is 0), ex '0' for X-displacement, '1' for Y-displacement,...""",
),
},
map_output_pin_spec={
@@ -128,16 +124,14 @@ def _spec():
name="scoping_clusters",
type_names=["scopings_container"],
optional=False,
- document="""Scopings container with the space scoping
- (entities' ids) corresponding to each
- of k-clusters""",
+ document=r"""Scopings container with the space scoping (entities' ids) corresponding to each of k-clusters""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -146,29 +140,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="kmeans_operator", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsKmeansClustering:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsKmeansClustering
+ inputs:
+ An instance of InputsKmeansClustering.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsKmeansClustering:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsKmeansClustering
+ outputs:
+ An instance of OutputsKmeansClustering.
"""
return super().outputs
@@ -207,14 +208,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._component_number)
@property
- def clusters_number(self):
- """Allows to connect clusters_number input to the operator.
+ def clusters_number(self) -> Input:
+ r"""Allows to connect clusters_number input to the operator.
- Number of the clusters (dafault is 3)
+ number of the clusters (default is 3)
- Parameters
- ----------
- my_clusters_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,14 +229,15 @@ def clusters_number(self):
return self._clusters_number
@property
- def formula(self):
- """Allows to connect formula input to the operator.
+ def formula(self) -> Input:
+ r"""Allows to connect formula input to the operator.
- Formula ('dist'/'dotprod'), default is 'dist'
+ formula ('dist'/'dotprod'), default is 'dist'
- Parameters
- ----------
- my_formula : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -247,16 +250,15 @@ def formula(self):
return self._formula
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- An iunput fields container containing the
- data which will be used for the
- clustering
+ an input fields container containing the data which will be used for the clustering
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -269,16 +271,15 @@ def fields_container(self):
return self._fields_container
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- Component number as an int (default is 0), ex
- '0' for x-displacement, '1' for
- y-displacement,...
+ component number as an int (default is 0), e.g. '0' for X-displacement, '1' for Y-displacement, ...
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -309,18 +310,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping_clusters)
@property
- def scoping_clusters(self):
- """Allows to get scoping_clusters output of the operator
+ def scoping_clusters(self) -> Output:
+ r"""Allows to get scoping_clusters output of the operator
+
+ Scopings container with the space scoping (entities' ids) corresponding to each of k-clusters
Returns
- ----------
- my_scoping_clusters : ScopingsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.kmeans_clustering()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping_clusters = op.outputs.scoping_clusters()
- """ # noqa: E501
+ """
return self._scoping_clusters
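
A minimal usage sketch of the kmeans_clustering operator above, based on its pin specification. The example result file and the displacement data used as input are placeholders, assuming a running DPF server.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input: a displacement fields container from an example result.
model = dpf.Model(examples.find_static_rst())
disp = model.results.displacement().eval()

# Group entities into 3 clusters using the 'dist' formula on the
# X-displacement component (component 0).
op = dpf.operators.compression.kmeans_clustering()
op.inputs.fields_container.connect(disp)
op.inputs.clusters_number.connect(3)
op.inputs.formula.connect("dist")
op.inputs.component_number.connect(0)

clusters = op.outputs.scoping_clusters()  # one space scoping per cluster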
diff --git a/src/ansys/dpf/core/operators/compression/sketch_matrix.py b/src/ansys/dpf/core/operators/compression/sketch_matrix.py
index a0d97091a85..f385b45277c 100644
--- a/src/ansys/dpf/core/operators/compression/sketch_matrix.py
+++ b/src/ansys/dpf/core/operators/compression/sketch_matrix.py
@@ -4,57 +4,49 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sketch_matrix(Operator):
- """Compress fields container using an orthonormal randomized (Gaussian
+ r"""Compress fields container using an orthonormal randomized (Gaussian
distribution) sketch matrix.
+
Parameters
----------
- dataIn : FieldsContainer
- Fields container to be compressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).
- sketch_matrix : Field, optional
+ dataIn: FieldsContainer
+ Fields container to be compressed. It is assumed that all fields have the same structure (scoping, num_entities).
+ sketch_matrix: Field, optional
Field containing the sketch matrix.
- rank : int
+ rank: int
Rank of the output matrix fields_container.
- random_generator_seed : int, optional
- Value used as the seed for the random number
- generator. default = 0.
- mean : float, optional
- Mean value of the random numbers matrix.
- default = 0.0.
- standard_deviation : float, optional
- Standard deviation of the random numbers
- matrix. default = 1.0.
- othogonalized : bool, optional
- Orthogonalize matrix. default = true.
- power_iterations : int, optional
- Number of power iterations to perform. a
- larger number of iterations impact
- performance, but increase the
- accuracy. default = 0.
+ random_generator_seed: int, optional
+ Value used as the seed for the random number generator. Default = 0.
+ mean: float, optional
+ Mean value of the random numbers matrix. Default = 0.0.
+ standard_deviation: float, optional
+ Standard deviation of the random numbers matrix. Default = 1.0.
+ othogonalized: bool, optional
+ Orthogonalize matrix. Default = True.
+ power_iterations: int, optional
+ Number of power iterations to perform. A larger number of iterations impacts performance, but increases the accuracy. Default = 0.
Returns
-------
- dataOut : FieldsContainer
- The output matrix is a 'fields_container';
- each field correspond to the
- multiplication of the sketch matrix
- by the original fields.
- sketch_matrix : Field
+ dataOut: FieldsContainer
+ the output matrix is a 'fields_container'; each field corresponds to the multiplication of the sketch matrix by the original fields.
+ sketch_matrix: Field
Field containing the sketch matrix.
- shell_field : Field
- Empty field containing the shell of the
- original field for future
- reconstruction.
+ shell_field: Field
+ Empty field containing the shell of the original field for future reconstruction.
Examples
--------
@@ -133,9 +125,10 @@ def __init__(
self.inputs.power_iterations.connect(power_iterations)
@staticmethod
- def _spec():
- description = """Compress fields container using an orthonormal randomized (Gaussian
- distribution) sketch matrix."""
+ def _spec() -> Specification:
+ description = r"""Compress fields container using an orthonormal randomized (Gaussian
+distribution) sketch matrix.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -143,57 +136,49 @@ def _spec():
name="dataIn",
type_names=["fields_container"],
optional=False,
- document="""Fields container to be compressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).""",
+ document=r"""Fields container to be compressed. It is assumed that all fields have the same structure (scoping, num_entities).""",
),
1: PinSpecification(
name="sketch_matrix",
type_names=["field"],
optional=True,
- document="""Field containing the sketch matrix.""",
+ document=r"""Field containing the sketch matrix.""",
),
2: PinSpecification(
name="rank",
type_names=["int32"],
optional=False,
- document="""Rank of the output matrix fields_container.""",
+ document=r"""Rank of the output matrix fields_container.""",
),
3: PinSpecification(
name="random_generator_seed",
type_names=["int32"],
optional=True,
- document="""Value used as the seed for the random number
- generator. default = 0.""",
+ document=r"""Value used as the seed for the random number generator. Default = 0.""",
),
4: PinSpecification(
name="mean",
type_names=["double"],
optional=True,
- document="""Mean value of the random numbers matrix.
- default = 0.0.""",
+ document=r"""Mean value of the random numbers matrix. Default = 0.0.""",
),
5: PinSpecification(
name="standard_deviation",
type_names=["double"],
optional=True,
- document="""Standard deviation of the random numbers
- matrix. default = 1.0.""",
+ document=r"""Standard deviation of the random numbers matrix. Default = 1.0.""",
),
6: PinSpecification(
name="othogonalized",
type_names=["bool"],
optional=True,
- document="""Orthogonalize matrix. default = true.""",
+ document=r"""Orthogonalize matrix. Default = True.""",
),
7: PinSpecification(
name="power_iterations",
type_names=["int32"],
optional=True,
- document="""Number of power iterations to perform. a
- larger number of iterations impact
- performance, but increase the
- accuracy. default = 0.""",
+ document=r"""Number of power iterations to perform. A larger number of iterations impact performance, but increase the accuracy. Default = 0.""",
),
},
map_output_pin_spec={
@@ -201,31 +186,26 @@ def _spec():
name="dataOut",
type_names=["fields_container"],
optional=False,
- document="""The output matrix is a 'fields_container';
- each field correspond to the
- multiplication of the sketch matrix
- by the original fields.""",
+ document=r"""the output matrix is a 'fields_container'; each field correspond to the multiplication of the sketch matrix by the original fields.""",
),
1: PinSpecification(
name="sketch_matrix",
type_names=["field"],
optional=False,
- document="""Field containing the sketch matrix.""",
+ document=r"""Field containing the sketch matrix.""",
),
2: PinSpecification(
name="shell_field",
type_names=["field"],
optional=False,
- document="""Empty field containing the shell of the
- original field for future
- reconstruction.""",
+ document=r"""Empty field containing the shell of the original field for future reconstruction.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -234,29 +214,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sketch_matrix_compress", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSketchMatrix:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSketchMatrix
+ inputs:
+ An instance of InputsSketchMatrix.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSketchMatrix:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSketchMatrix
+ outputs:
+ An instance of OutputsSketchMatrix.
"""
return super().outputs
@@ -309,16 +296,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._power_iterations)
@property
- def dataIn(self):
- """Allows to connect dataIn input to the operator.
+ def dataIn(self) -> Input:
+ r"""Allows to connect dataIn input to the operator.
- Fields container to be compressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).
+ Fields container to be compressed. It is assumed that all fields have the same structure (scoping, num_entities).
- Parameters
- ----------
- my_dataIn : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -331,14 +317,15 @@ def dataIn(self):
return self._dataIn
@property
- def sketch_matrix(self):
- """Allows to connect sketch_matrix input to the operator.
+ def sketch_matrix(self) -> Input:
+ r"""Allows to connect sketch_matrix input to the operator.
Field containing the sketch matrix.
- Parameters
- ----------
- my_sketch_matrix : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -351,14 +338,15 @@ def sketch_matrix(self):
return self._sketch_matrix
@property
- def rank(self):
- """Allows to connect rank input to the operator.
+ def rank(self) -> Input:
+ r"""Allows to connect rank input to the operator.
Rank of the output matrix fields_container.
- Parameters
- ----------
- my_rank : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -371,15 +359,15 @@ def rank(self):
return self._rank
@property
- def random_generator_seed(self):
- """Allows to connect random_generator_seed input to the operator.
+ def random_generator_seed(self) -> Input:
+ r"""Allows to connect random_generator_seed input to the operator.
- Value used as the seed for the random number
- generator. default = 0.
+ Value used as the seed for the random number generator. Default = 0.
- Parameters
- ----------
- my_random_generator_seed : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -392,15 +380,15 @@ def random_generator_seed(self):
return self._random_generator_seed
@property
- def mean(self):
- """Allows to connect mean input to the operator.
+ def mean(self) -> Input:
+ r"""Allows to connect mean input to the operator.
- Mean value of the random numbers matrix.
- default = 0.0.
+ Mean value of the random numbers matrix. Default = 0.0.
- Parameters
- ----------
- my_mean : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -413,15 +401,15 @@ def mean(self):
return self._mean
@property
- def standard_deviation(self):
- """Allows to connect standard_deviation input to the operator.
+ def standard_deviation(self) -> Input:
+ r"""Allows to connect standard_deviation input to the operator.
- Standard deviation of the random numbers
- matrix. default = 1.0.
+ Standard deviation of the random numbers matrix. Default = 1.0.
- Parameters
- ----------
- my_standard_deviation : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -434,14 +422,15 @@ def standard_deviation(self):
return self._standard_deviation
@property
- def othogonalized(self):
- """Allows to connect othogonalized input to the operator.
+ def othogonalized(self) -> Input:
+ r"""Allows to connect othogonalized input to the operator.
- Orthogonalize matrix. default = true.
+ Orthogonalize matrix. Default = True.
- Parameters
- ----------
- my_othogonalized : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -454,17 +443,15 @@ def othogonalized(self):
return self._othogonalized
@property
- def power_iterations(self):
- """Allows to connect power_iterations input to the operator.
+ def power_iterations(self) -> Input:
+ r"""Allows to connect power_iterations input to the operator.
- Number of power iterations to perform. a
- larger number of iterations impact
- performance, but increase the
- accuracy. default = 0.
+ Number of power iterations to perform. A larger number of iterations impacts performance, but increases the accuracy. Default = 0.
- Parameters
- ----------
- my_power_iterations : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,52 +488,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._shell_field)
@property
- def dataOut(self):
- """Allows to get dataOut output of the operator
+ def dataOut(self) -> Output:
+ r"""Allows to get dataOut output of the operator
+
+ the output matrix is a 'fields_container'; each field corresponds to the multiplication of the sketch matrix by the original fields.
Returns
- ----------
- my_dataOut : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.sketch_matrix()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dataOut = op.outputs.dataOut()
- """ # noqa: E501
+ """
return self._dataOut
@property
- def sketch_matrix(self):
- """Allows to get sketch_matrix output of the operator
+ def sketch_matrix(self) -> Output:
+ r"""Allows to get sketch_matrix output of the operator
+
+ Field containing the sketch matrix.
Returns
- ----------
- my_sketch_matrix : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.sketch_matrix()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_sketch_matrix = op.outputs.sketch_matrix()
- """ # noqa: E501
+ """
return self._sketch_matrix
@property
- def shell_field(self):
- """Allows to get shell_field output of the operator
+ def shell_field(self) -> Output:
+ r"""Allows to get shell_field output of the operator
+
+ Empty field containing the shell of the original field for future reconstruction.
Returns
- ----------
- my_shell_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.sketch_matrix()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_shell_field = op.outputs.shell_field()
- """ # noqa: E501
+ """
return self._shell_field
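
A minimal usage sketch of the sketch_matrix compression operator above, following its pin specification. The displacement fields container used as input is a placeholder, assuming a running DPF server and the bundled static_rst example file.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input: a displacement fields container from an example result.
model = dpf.Model(examples.find_static_rst())
disp = model.results.displacement().eval()

# Compress the fields container with a rank-2 randomized sketch matrix.
op = dpf.operators.compression.sketch_matrix()
op.inputs.dataIn.connect(disp)
op.inputs.rank.connect(2)
op.inputs.random_generator_seed.connect(0)

compressed = op.outputs.dataOut()    # sketch matrix times the original fields
sketch = op.outputs.sketch_matrix()  # keep for decompression
shell = op.outputs.shell_field()     # keep for reconstruction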
diff --git a/src/ansys/dpf/core/operators/compression/sketch_matrix_decompress.py b/src/ansys/dpf/core/operators/compression/sketch_matrix_decompress.py
index cb3ca44cb06..1c7a0591b0b 100644
--- a/src/ansys/dpf/core/operators/compression/sketch_matrix_decompress.py
+++ b/src/ansys/dpf/core/operators/compression/sketch_matrix_decompress.py
@@ -4,36 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sketch_matrix_decompress(Operator):
- """Decompress fields container using an orthonormal randomized (Gaussian
+ r"""Decompress fields container using an orthonormal randomized (Gaussian
distribution) sketch matrix.
+
Parameters
----------
- dataIn : FieldsContainer
- Fields container to be decompressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).
- sketch_matrix : Field
+ dataIn: FieldsContainer
+ Fields container to be decompressed. It is assumed that all fields have the same structure (scoping, num_entities).
+ sketch_matrix: Field
Field containing the sketch matrix.
- shell_field : Field
- Empty field containing the shell of the field
- of decompressed data.
+ shell_field: Field
+ Empty field containing the shell of the field of decompressed data.
Returns
-------
- dataOut : FieldsContainer
- The output matrix is a 'fields_container';
- each field correspond to the
- multiplication of the sketch matrix
- by the original fields.
+ dataOut: FieldsContainer
+ the output matrix is a 'fields_container'; each field corresponds to the multiplication of the sketch matrix by the original fields.
Examples
--------
@@ -80,9 +79,10 @@ def __init__(
self.inputs.shell_field.connect(shell_field)
@staticmethod
- def _spec():
- description = """Decompress fields container using an orthonormal randomized (Gaussian
- distribution) sketch matrix."""
+ def _spec() -> Specification:
+ description = r"""Decompress fields container using an orthonormal randomized (Gaussian
+distribution) sketch matrix.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -90,22 +90,19 @@ def _spec():
name="dataIn",
type_names=["fields_container"],
optional=False,
- document="""Fields container to be decompressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).""",
+ document=r"""Fields container to be decompressed. It is assumed that all fields have the same structure (scoping, num_entities).""",
),
1: PinSpecification(
name="sketch_matrix",
type_names=["field"],
optional=False,
- document="""Field containing the sketch matrix.""",
+ document=r"""Field containing the sketch matrix.""",
),
2: PinSpecification(
name="shell_field",
type_names=["field"],
optional=False,
- document="""Empty field containing the shell of the field
- of decompressed data.""",
+ document=r"""Empty field containing the shell of the field of decompressed data.""",
),
},
map_output_pin_spec={
@@ -113,17 +110,14 @@ def _spec():
name="dataOut",
type_names=["fields_container"],
optional=False,
- document="""The output matrix is a 'fields_container';
- each field correspond to the
- multiplication of the sketch matrix
- by the original fields.""",
+ document=r"""the output matrix is a 'fields_container'; each field correspond to the multiplication of the sketch matrix by the original fields.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +126,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sketch_matrix_decompress", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSketchMatrixDecompress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSketchMatrixDecompress
+ inputs:
+ An instance of InputsSketchMatrixDecompress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSketchMatrixDecompress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSketchMatrixDecompress
+ outputs:
+ An instance of OutputsSketchMatrixDecompress.
"""
return super().outputs
@@ -189,16 +190,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_field)
@property
- def dataIn(self):
- """Allows to connect dataIn input to the operator.
+ def dataIn(self) -> Input:
+ r"""Allows to connect dataIn input to the operator.
- Fields container to be decompressed. it is
- assumed that all fields have the same
- structure (scoping, num_entities).
+ Fields container to be decompressed. It is assumed that all fields have the same structure (scoping, num_entities).
- Parameters
- ----------
- my_dataIn : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +211,15 @@ def dataIn(self):
return self._dataIn
@property
- def sketch_matrix(self):
- """Allows to connect sketch_matrix input to the operator.
+ def sketch_matrix(self) -> Input:
+ r"""Allows to connect sketch_matrix input to the operator.
Field containing the sketch matrix.
- Parameters
- ----------
- my_sketch_matrix : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,15 +232,15 @@ def sketch_matrix(self):
return self._sketch_matrix
@property
- def shell_field(self):
- """Allows to connect shell_field input to the operator.
+ def shell_field(self) -> Input:
+ r"""Allows to connect shell_field input to the operator.
- Empty field containing the shell of the field
- of decompressed data.
+ Empty field containing the shell of the field of decompressed data.
- Parameters
- ----------
- my_shell_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,18 +271,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._dataOut)
@property
- def dataOut(self):
- """Allows to get dataOut output of the operator
+ def dataOut(self) -> Output:
+ r"""Allows to get dataOut output of the operator
+
+ the output matrix is a 'fields_container'; each field correspond to the multiplication of the sketch matrix by the original fields.
Returns
- ----------
- my_dataOut : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.sketch_matrix_decompress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dataOut = op.outputs.dataOut()
- """ # noqa: E501
+ """
return self._dataOut
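
A minimal round-trip sketch for the sketch_matrix_decompress operator above: the three outputs of sketch_matrix feed the three inputs documented here. The example result used as input is a placeholder, assuming a running DPF server.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input: compress an example displacement result, then reconstruct it.
model = dpf.Model(examples.find_static_rst())
disp = model.results.displacement().eval()

compress = dpf.operators.compression.sketch_matrix(dataIn=disp, rank=2)
compressed = compress.outputs.dataOut()
sketch = compress.outputs.sketch_matrix()
shell = compress.outputs.shell_field()

decompress = dpf.operators.compression.sketch_matrix_decompress()
decompress.inputs.dataIn.connect(compressed)
decompress.inputs.sketch_matrix.connect(sketch)
decompress.inputs.shell_field.connect(shell)

reconstructed = decompress.outputs.dataOut()  # approximation of the original fields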
diff --git a/src/ansys/dpf/core/operators/compression/zfp_decompress.py b/src/ansys/dpf/core/operators/compression/zfp_decompress.py
index b4f1c0a556b..0ff469a0fc3 100644
--- a/src/ansys/dpf/core/operators/compression/zfp_decompress.py
+++ b/src/ansys/dpf/core/operators/compression/zfp_decompress.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class zfp_decompress(Operator):
- """zfp decompression using the information about compression written into
+ r"""zfp decompression using the information about compression written into
the properties of the field(s)
+
Parameters
----------
- dataIn : CustomTypeFieldsContainer
- Custom type field container from
- zfp_compression operator to
- decompress
+ dataIn: CustomTypeFieldsContainer
+ custom type field container from zfp_compression operator to decompress
Returns
-------
- dataOut : Field or FieldsContainer
- The output entity is a field or a fields
- container; it contains decompressed
- data
- decompress_speed : float
- The output entity is a double, containing
- decompression speed (mb/sec)
+ dataOut: Field or FieldsContainer
+ the output entity is a field or a fields container; it contains decompressed data
+ decompress_speed: float
+ the output entity is a double, containing decompression speed (mb/sec)
Examples
--------
@@ -62,9 +62,10 @@ def __init__(self, dataIn=None, config=None, server=None):
self.inputs.dataIn.connect(dataIn)
@staticmethod
- def _spec():
- description = """zfp decompression using the information about compression written into
- the properties of the field(s)"""
+ def _spec() -> Specification:
+ description = r"""zfp decompression using the information about compression written into
+the properties of the field(s)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -72,9 +73,7 @@ def _spec():
name="dataIn",
type_names=["custom_type_fields_container"],
optional=False,
- document="""Custom type field container from
- zfp_compression operator to
- decompress""",
+ document=r"""custom type field container from zfp_compression operator to decompress""",
),
},
map_output_pin_spec={
@@ -82,23 +81,20 @@ def _spec():
name="dataOut",
type_names=["field", "fields_container"],
optional=False,
- document="""The output entity is a field or a fields
- container; it contains decompressed
- data""",
+ document=r"""the output entity is a field or a fields container; it contains decompressed data""",
),
1: PinSpecification(
name="decompress_speed",
type_names=["double"],
optional=False,
- document="""The output entity is a double, containing
- decompression speed (mb/sec)""",
+ document=r"""the output entity is a double, containing decompression speed (mb/sec)""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -107,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="zfp_decompress", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsZfpDecompress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsZfpDecompress
+ inputs:
+ An instance of InputsZfpDecompress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsZfpDecompress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsZfpDecompress
+ outputs:
+ An instance of OutputsZfpDecompress.
"""
return super().outputs
@@ -152,16 +155,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._dataIn)
@property
- def dataIn(self):
- """Allows to connect dataIn input to the operator.
+ def dataIn(self) -> Input:
+ r"""Allows to connect dataIn input to the operator.
- Custom type field container from
- zfp_compression operator to
- decompress
+ custom type field container from zfp_compression operator to decompress
- Parameters
- ----------
- my_dataIn : CustomTypeFieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -209,18 +211,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._decompress_speed)
@property
- def decompress_speed(self):
- """Allows to get decompress_speed output of the operator
+ def decompress_speed(self) -> Output:
+ r"""Allows to get decompress_speed output of the operator
+
+ the output entity is a double, containing decompression speed (mb/sec)
Returns
- ----------
- my_decompress_speed : float
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.compression.zfp_decompress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_decompress_speed = op.outputs.decompress_speed()
- """ # noqa: E501
+ """
return self._decompress_speed
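
A minimal round-trip sketch for the zfp_decompress operator above, reusing the apply_zfp operator from the same module. The example result used as input is a placeholder, assuming a running DPF server.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input: zfp-compress an example displacement result, then
# decompress it using the compression information stored in the field properties.
model = dpf.Model(examples.find_static_rst())
disp = model.results.displacement().eval()

compress = dpf.operators.compression.apply_zfp(dataIn=disp, mode="p", mode_parameter=16)
compressed = compress.outputs.dataOut()

decompress = dpf.operators.compression.zfp_decompress()
decompress.inputs.dataIn.connect(compressed)

restored = decompress.outputs.dataOut()        # field or fields container
speed = decompress.outputs.decompress_speed()  # decompression speed (mb/sec)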
diff --git a/src/ansys/dpf/core/operators/filter/abc_weightings.py b/src/ansys/dpf/core/operators/filter/abc_weightings.py
index af20ea89d9e..3d96405694a 100644
--- a/src/ansys/dpf/core/operators/filter/abc_weightings.py
+++ b/src/ansys/dpf/core/operators/filter/abc_weightings.py
@@ -4,35 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class abc_weightings(Operator):
- """Computes ABC-weightings for the amplitude spectrum in dB units.
+ r"""Computes ABC-weightings for the amplitude spectrum in dB units.
+
Parameters
----------
- fields_container : FieldsContainer
- Data to be weighted in db units.
- weighting_type : int
- If this pin is set to 0, the a-weighting is
- computed, 1 the b-weigting is
- computed and 2 the c-weightings is
- computed.
- shape_by_tf_scoping : bool
- If this pin is set to true, each field of the
- input fields container is defined by
- time freq scoping and not by ids.
- default is false
+ fields_container: FieldsContainer
+ data to be weighted in dB units.
+ weighting_type: int
+ if this pin is set to 0, the A-weighting is computed; if set to 1, the B-weighting; and if set to 2, the C-weighting.
+ shape_by_tf_scoping: bool
+ if this pin is set to true, each field of the input fields container is defined by time freq scoping and not by ids. Default is false
Returns
-------
- weightings : FieldsContainer
- Weighted data in db units.
+ weightings: FieldsContainer
+ weighted data in dB units.
Examples
--------
@@ -79,10 +78,9 @@ def __init__(
self.inputs.shape_by_tf_scoping.connect(shape_by_tf_scoping)
@staticmethod
- def _spec():
- description = (
- """Computes ABC-weightings for the amplitude spectrum in dB units."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes ABC-weightings for the amplitude spectrum in dB units.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -90,25 +88,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Data to be weighted in db units.""",
+ document=r"""data to be weighted in dB units.""",
),
1: PinSpecification(
name="weighting_type",
type_names=["int32"],
optional=False,
- document="""If this pin is set to 0, the a-weighting is
- computed, 1 the b-weigting is
- computed and 2 the c-weightings is
- computed.""",
+ document=r"""if this pin is set to 0, the A-weighting is computed, 1 the B-weigting is computed and 2 the C-weightings is computed.""",
),
2: PinSpecification(
name="shape_by_tf_scoping",
type_names=["bool"],
optional=False,
- document="""If this pin is set to true, each field of the
- input fields container is defined by
- time freq scoping and not by ids.
- default is false""",
+ document=r"""if this pin is set to true, each field of the input fields container is defined by time freq scoping and not by ids. Default is false""",
),
},
map_output_pin_spec={
@@ -116,14 +108,14 @@ def _spec():
name="weightings",
type_names=["fields_container"],
optional=False,
- document="""Weighted data in db units.""",
+ document=r"""weighted data in dB units.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="abc_weightings", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAbcWeightings:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAbcWeightings
+ inputs:
+ An instance of InputsAbcWeightings.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAbcWeightings:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAbcWeightings
+ outputs:
+ An instance of OutputsAbcWeightings.
"""
return super().outputs
@@ -187,14 +186,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shape_by_tf_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Data to be weighted in db units.
+ data to be weighted in dB units.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,17 +207,15 @@ def fields_container(self):
return self._fields_container
@property
- def weighting_type(self):
- """Allows to connect weighting_type input to the operator.
+ def weighting_type(self) -> Input:
+ r"""Allows to connect weighting_type input to the operator.
- If this pin is set to 0, the a-weighting is
- computed, 1 the b-weigting is
- computed and 2 the c-weightings is
- computed.
+ if this pin is set to 0, the A-weighting is computed; if set to 1, the B-weighting; and if set to 2, the C-weighting.
- Parameters
- ----------
- my_weighting_type : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -230,17 +228,15 @@ def weighting_type(self):
return self._weighting_type
@property
- def shape_by_tf_scoping(self):
- """Allows to connect shape_by_tf_scoping input to the operator.
+ def shape_by_tf_scoping(self) -> Input:
+ r"""Allows to connect shape_by_tf_scoping input to the operator.
- If this pin is set to true, each field of the
- input fields container is defined by
- time freq scoping and not by ids.
- default is false
+ if this pin is set to true, each field of the input fields container is defined by time freq scoping and not by ids. Default is false
- Parameters
- ----------
- my_shape_by_tf_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -271,18 +267,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._weightings)
@property
- def weightings(self):
- """Allows to get weightings output of the operator
+ def weightings(self) -> Output:
+ r"""Allows to get weightings output of the operator
+
+ weighted data in dB units.
Returns
- ----------
- my_weightings : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.abc_weightings()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_weightings = op.outputs.weightings()
- """ # noqa: E501
+ """
return self._weightings
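
A minimal usage sketch of the abc_weightings operator above. In practice the input fields container holds an amplitude spectrum in dB units as documented; the example displacement result used here is only a placeholder to show how the pins are wired, assuming a running DPF server.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

# Placeholder input standing in for an amplitude spectrum in dB units.
model = dpf.Model(examples.find_static_rst())
spectrum_db = model.results.displacement().eval()

op = dpf.operators.filter.abc_weightings()
op.inputs.fields_container.connect(spectrum_db)
op.inputs.weighting_type.connect(0)           # 0 = A-weighting, 1 = B, 2 = C
op.inputs.shape_by_tf_scoping.connect(False)  # fields identified by ids, not by time freq scoping

weighted = op.outputs.weightings()  # weighted data in dB units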
diff --git a/src/ansys/dpf/core/operators/filter/field_band_pass.py b/src/ansys/dpf/core/operators/filter/field_band_pass.py
index 423da756a4f..f97e902618f 100644
--- a/src/ansys/dpf/core/operators/filter/field_band_pass.py
+++ b/src/ansys/dpf/core/operators/filter/field_band_pass.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_band_pass(Operator):
- """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to) the
- maximum threshold value in input.
+ r"""The band pass filter returns all the values above (but not equal to) the
+ minimum threshold value and below (but not equal to) the maximum
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- min_threshold : float or Field
- A minimum threshold scalar or a field
- containing one value is expected.
- max_threshold : float or Field, optional
- A maximum threshold scalar or a field
- containing one value is expected.
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ min_threshold: float or Field
+ A minimum threshold scalar or a field containing one value is expected.
+ max_threshold: float or Field, optional
+ A maximum threshold scalar or a field containing one value is expected.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -77,10 +79,11 @@ def __init__(
self.inputs.max_threshold.connect(max_threshold)
@staticmethod
- def _spec():
- description = """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to)
- the maximum threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The band pass filter returns all the values above (but not equal to) the
+minimum threshold value and below (but not equal to) the maximum
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,22 +91,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="min_threshold",
type_names=["double", "field"],
optional=False,
- document="""A minimum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A minimum threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="max_threshold",
type_names=["double", "field"],
optional=True,
- document="""A maximum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A maximum threshold scalar or a field containing one value is expected.""",
),
},
map_output_pin_spec={
@@ -111,14 +111,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -127,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::band_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldBandPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldBandPass
+ inputs:
+ An instance of InputsFieldBandPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldBandPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldBandPass
+ outputs:
+ An instance of OutputsFieldBandPass.
"""
return super().outputs
@@ -180,15 +187,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._max_threshold)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -201,15 +208,15 @@ def field(self):
return self._field
@property
- def min_threshold(self):
- """Allows to connect min_threshold input to the operator.
+ def min_threshold(self) -> Input:
+ r"""Allows to connect min_threshold input to the operator.
- A minimum threshold scalar or a field
- containing one value is expected.
+ A minimum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_min_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,15 +229,15 @@ def min_threshold(self):
return self._min_threshold
@property
- def max_threshold(self):
- """Allows to connect max_threshold input to the operator.
+ def max_threshold(self) -> Input:
+ r"""Allows to connect max_threshold input to the operator.
- A maximum threshold scalar or a field
- containing one value is expected.
+ A maximum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_max_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,18 +268,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_band_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
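
For orientation, a minimal usage sketch of the band-pass operator based on the pins documented above; `my_field` is a placeholder for an existing `Field` (or one-field `FieldsContainer`) and the threshold values are illustrative only:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_band_pass()
>>> op.inputs.field.connect(my_field)        # placeholder input
>>> op.inputs.min_threshold.connect(0.0)     # values strictly above this are kept
>>> op.inputs.max_threshold.connect(100.0)   # values strictly below this are kept
>>> result_field = op.outputs.field()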
diff --git a/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py
index 79f26382cc5..11d012740c9 100644
--- a/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py
+++ b/src/ansys/dpf/core/operators/filter/field_band_pass_fc.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_band_pass_fc(Operator):
- """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to) the
- maximum threshold value in input.
+ r"""The band pass filter returns all the values above (but not equal to) the
+ minimum threshold value and below (but not equal to) the maximum
+ threshold value in input.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- min_threshold : float or Field
- A minimum threshold scalar or a field
- containing one value is expected.
- max_threshold : float or Field, optional
- A maximum threshold scalar or a field
- containing one value is expected.
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ min_threshold: float or Field
+ A minimum threshold scalar or a field containing one value is expected.
+ max_threshold: float or Field, optional
+ A maximum threshold scalar or a field containing one value is expected.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -77,10 +79,11 @@ def __init__(
self.inputs.max_threshold.connect(max_threshold)
@staticmethod
- def _spec():
- description = """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to)
- the maximum threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The band pass filter returns all the values above (but not equal to) the
+minimum threshold value and below (but not equal to) the maximum
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,22 +91,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="min_threshold",
type_names=["double", "field"],
optional=False,
- document="""A minimum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A minimum threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="max_threshold",
type_names=["double", "field"],
optional=True,
- document="""A maximum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A maximum threshold scalar or a field containing one value is expected.""",
),
},
map_output_pin_spec={
@@ -111,14 +111,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -127,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::band_pass_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldBandPassFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldBandPassFc
+ inputs:
+ An instance of InputsFieldBandPassFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldBandPassFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldBandPassFc
+ outputs:
+ An instance of OutputsFieldBandPassFc.
"""
return super().outputs
@@ -182,15 +189,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._max_threshold)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,15 +210,15 @@ def fields_container(self):
return self._fields_container
@property
- def min_threshold(self):
- """Allows to connect min_threshold input to the operator.
+ def min_threshold(self) -> Input:
+ r"""Allows to connect min_threshold input to the operator.
- A minimum threshold scalar or a field
- containing one value is expected.
+ A minimum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_min_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,15 +231,15 @@ def min_threshold(self):
return self._min_threshold
@property
- def max_threshold(self):
- """Allows to connect max_threshold input to the operator.
+ def max_threshold(self) -> Input:
+ r"""Allows to connect max_threshold input to the operator.
- A maximum threshold scalar or a field
- containing one value is expected.
+ A maximum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_max_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -263,18 +270,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_band_pass_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
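
The fields-container variant can also be driven through its constructor; this sketch assumes the keyword arguments mirror the pin names, as in the sibling classes, and `my_fields_container` is a placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_band_pass_fc(
...     fields_container=my_fields_container,  # placeholder FieldsContainer
...     min_threshold=0.0,
...     max_threshold=100.0,
... )
>>> filtered = op.outputs.fields_container()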
diff --git a/src/ansys/dpf/core/operators/filter/field_high_pass.py b/src/ansys/dpf/core/operators/filter/field_high_pass.py
index 00fa3735ac5..1b0a325ce1a 100644
--- a/src/ansys/dpf/core/operators/filter/field_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/field_high_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_high_pass(Operator):
- """The high pass filter returns all the values above (but not equal to)
- the threshold value in input.
+ r"""The high pass filter returns all the values above (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -71,9 +71,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,24 +82,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -106,14 +102,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -122,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::high_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldHighPass
+ inputs:
+ An instance of InputsFieldHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldHighPass
+ outputs:
+ An instance of OutputsFieldHighPass.
"""
return super().outputs
@@ -175,15 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,15 +199,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,17 +220,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
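
A short sketch showing the optional `both` pin of the high-pass filter; `my_field` is a placeholder and the threshold is arbitrary:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_high_pass()
>>> op.inputs.field.connect(my_field)     # placeholder input
>>> op.inputs.threshold.connect(1.5e6)    # keep values strictly above this
>>> op.inputs.both.connect(True)          # complement is returned on output pin 1
>>> kept = op.outputs.field()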
diff --git a/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py
index 4ed87e20e41..84e45d5e97a 100644
--- a/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py
+++ b/src/ansys/dpf/core/operators/filter/field_high_pass_fc.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_high_pass_fc(Operator):
- """The high pass filter returns all the values above (but not equal to)
- the threshold value in input.
+ r"""The high pass filter returns all the values above (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -73,9 +73,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,24 +84,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -108,14 +104,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,29 +120,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::high_pass_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldHighPassFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldHighPassFc
+ inputs:
+ An instance of InputsFieldHighPassFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldHighPassFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldHighPassFc
+ outputs:
+ An instance of OutputsFieldHighPassFc.
"""
return super().outputs
@@ -179,15 +182,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,15 +203,15 @@ def fields_container(self):
return self._fields_container
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,17 +224,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,18 +263,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_high_pass_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
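
Operators are typically chained; this hypothetical sketch feeds the output of an upstream operator (`stress_op`, not defined here) into the fields-container high-pass filter:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_high_pass_fc()
>>> op.inputs.fields_container.connect(stress_op.outputs.fields_container)  # upstream output
>>> op.inputs.threshold.connect(2.0e8)
>>> filtered = op.outputs.fields_container()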
diff --git a/src/ansys/dpf/core/operators/filter/field_low_pass.py b/src/ansys/dpf/core/operators/filter/field_low_pass.py
index ba7927b7d34..30c7d9bfff0 100644
--- a/src/ansys/dpf/core/operators/filter/field_low_pass.py
+++ b/src/ansys/dpf/core/operators/filter/field_low_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_low_pass(Operator):
- """The low pass filter returns all the values below (but not equal to)
- the threshold value in input.
+ r"""The low pass filter returns all the values below (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ a threshold scalar or a field containing one value is expected
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -71,9 +71,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The low pass filter returns all the values below (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The low pass filter returns all the values below (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,24 +82,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected""",
+ document=r"""a threshold scalar or a field containing one value is expected""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -106,14 +102,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -122,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::low_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldLowPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldLowPass
+ inputs:
+ An instance of InputsFieldLowPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldLowPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldLowPass
+ outputs:
+ An instance of OutputsFieldLowPass.
"""
return super().outputs
@@ -175,15 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,15 +199,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected
+ a threshold scalar or a field containing one value is expected
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,17 +220,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_low_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
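
The low-pass counterpart keeps values strictly below the threshold; `my_field` is again a placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_low_pass()
>>> op.inputs.field.connect(my_field)   # placeholder input
>>> op.inputs.threshold.connect(0.01)   # keep values strictly below this
>>> result_field = op.outputs.field()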
diff --git a/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py
index cd2a06d580c..56f811b3eb8 100644
--- a/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py
+++ b/src/ansys/dpf/core/operators/filter/field_low_pass_fc.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_low_pass_fc(Operator):
- """The low pass filter returns all the values below (but not equal to)
- the threshold value in input.
+ r"""The low pass filter returns all the values below (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ a threshold scalar or a field containing one value is expected
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -73,9 +73,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The low pass filter returns all the values below (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The low pass filter returns all the values below (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,24 +84,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected""",
+ document=r"""a threshold scalar or a field containing one value is expected""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -108,14 +104,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,29 +120,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::field::low_pass_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldLowPassFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldLowPassFc
+ inputs:
+ An instance of InputsFieldLowPassFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldLowPassFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldLowPassFc
+ outputs:
+ An instance of OutputsFieldLowPassFc.
"""
return super().outputs
@@ -179,15 +182,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,15 +203,15 @@ def fields_container(self):
return self._fields_container
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected
+ a threshold scalar or a field containing one value is expected
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,17 +224,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,18 +263,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_low_pass_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
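
The new `default_config` signature returns a `Config` that can be tweaked and handed back to the constructor; a sketch, assuming the fc constructor exposes the same `config` keyword as its siblings and with `my_fields_container` as a placeholder:

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.filter.field_low_pass_fc.default_config()
>>> op = dpf.operators.filter.field_low_pass_fc(config=config)
>>> op.inputs.fields_container.connect(my_fields_container)  # placeholder input
>>> op.inputs.threshold.connect(0.01)
>>> filtered = op.outputs.fields_container()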
diff --git a/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py
index 1ffcb5e8695..d0a1383472d 100644
--- a/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/field_signed_high_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_signed_high_pass(Operator):
- """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input.
+ r"""The high pass filter returns all the values above, or equal, in absolute
+ value to the threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -73,9 +73,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above, or equal, in absolute
+value to the threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,24 +84,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -108,14 +104,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,31 +120,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::field::signed_high_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldSignedHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldSignedHighPass
+ inputs:
+ An instance of InputsFieldSignedHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldSignedHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldSignedHighPass
+ outputs:
+ An instance of OutputsFieldSignedHighPass.
"""
return super().outputs
@@ -179,15 +182,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,15 +203,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,17 +224,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,18 +263,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_signed_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
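
Unlike the plain high-pass, the signed variant compares absolute values and keeps values greater than or equal to the threshold; `my_field` is a placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_signed_high_pass()
>>> op.inputs.field.connect(my_field)    # placeholder input
>>> op.inputs.threshold.connect(100.0)   # keep values with |value| >= 100.0
>>> result_field = op.outputs.field()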
diff --git a/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py b/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py
index 3e133cb61bc..89bda8133e3 100644
--- a/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py
+++ b/src/ansys/dpf/core/operators/filter/field_signed_high_pass_fc.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class field_signed_high_pass_fc(Operator):
- """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input.
+ r"""The high pass filter returns all the values above, or equal, in absolute
+ value to the threshold value in input.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -75,9 +75,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above, or equal, in absolute
+value to the threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,24 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -110,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,31 +122,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::field::signed_high_pass_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsFieldSignedHighPassFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFieldSignedHighPassFc
+ inputs:
+ An instance of InputsFieldSignedHighPassFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFieldSignedHighPassFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFieldSignedHighPassFc
+ outputs:
+ An instance of OutputsFieldSignedHighPassFc.
"""
return super().outputs
@@ -185,15 +188,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,15 +209,15 @@ def fields_container(self):
return self._fields_container
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,17 +230,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,18 +271,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_signed_high_pass_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
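
Constructor-style sketch for the fields-container signed high-pass, again assuming keyword arguments mirror the pin names; `my_fields_container` is a placeholder:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.field_signed_high_pass_fc(
...     fields_container=my_fields_container,  # placeholder FieldsContainer
...     threshold=100.0,
...     both=True,                             # complement on output pin 1
... )
>>> filtered = op.outputs.fields_container()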
diff --git a/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py b/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py
index 8c2a37ef8bd..94a222ecb06 100644
--- a/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py
+++ b/src/ansys/dpf/core/operators/filter/filtering_max_over_time.py
@@ -4,36 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class filtering_max_over_time(Operator):
- """Creates a filtering workflow that will filter results based on a
+ r"""Creates a filtering workflow that will filter results based on a
threshold of a selected invariant.
+
Parameters
----------
- invariant_fc_operator : str
- Name of the invariant operator to be used to
- calculate filter (available: eqv_fc,
- invariants_deriv_fc, invariants_fc).
- output_pin : int, optional
- Output pin of the invariant operator. default
- = 0.
- list_of_results : str, optional
- If no result is given, filter will be applied
- on stresses and strains
- threshold : float, optional
- Threshold from which the operator will
- filter.
+ invariant_fc_operator: str
+        Name of the invariant operator to be used to calculate the filter (available: eqv_fc, invariants_deriv_fc, invariants_fc).
+ output_pin: int, optional
+ Output pin of the invariant operator. Default = 0.
+ list_of_results: str, optional
+        If no result is given, the filter will be applied to stresses and strains
+ threshold: float, optional
+ Threshold from which the operator will filter.
Returns
-------
- workflow : Workflow
+ workflow: Workflow
Examples
--------
@@ -86,9 +86,10 @@ def __init__(
self.inputs.threshold.connect(threshold)
@staticmethod
- def _spec():
- description = """Creates a filtering workflow that will filter results based on a
- threshold of a selected invariant."""
+ def _spec() -> Specification:
+ description = r"""Creates a filtering workflow that will filter results based on a
+threshold of a selected invariant.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -96,30 +97,25 @@ def _spec():
name="invariant_fc_operator",
type_names=["string"],
optional=False,
- document="""Name of the invariant operator to be used to
- calculate filter (available: eqv_fc,
- invariants_deriv_fc, invariants_fc).""",
+ document=r"""Name of the invariant operator to be used to calculate filter (available: eqv_fc, invariants_deriv_fc, invariants_fc).""",
),
1: PinSpecification(
name="output_pin",
type_names=["int32"],
optional=True,
- document="""Output pin of the invariant operator. default
- = 0.""",
+ document=r"""Output pin of the invariant operator. Default = 0.""",
),
2: PinSpecification(
name="list_of_results",
type_names=["vector", "string"],
optional=True,
- document="""If no result is given, filter will be applied
- on stresses and strains""",
+ document=r"""If no result is given, filter will be applied on Stresses and Strains""",
),
3: PinSpecification(
name="threshold",
type_names=["double"],
optional=True,
- document="""Threshold from which the operator will
- filter.""",
+ document=r"""Threshold from which the operator will filter.""",
),
},
map_output_pin_spec={
@@ -127,14 +123,14 @@ def _spec():
name="workflow",
type_names=["workflow"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -143,29 +139,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="filtering_max_over_time", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFilteringMaxOverTime:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFilteringMaxOverTime
+ inputs:
+ An instance of InputsFilteringMaxOverTime.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFilteringMaxOverTime:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFilteringMaxOverTime
+ outputs:
+ An instance of OutputsFilteringMaxOverTime.
"""
return super().outputs
@@ -206,16 +209,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._threshold)
@property
- def invariant_fc_operator(self):
- """Allows to connect invariant_fc_operator input to the operator.
+ def invariant_fc_operator(self) -> Input:
+ r"""Allows to connect invariant_fc_operator input to the operator.
- Name of the invariant operator to be used to
- calculate filter (available: eqv_fc,
- invariants_deriv_fc, invariants_fc).
+    Name of the invariant operator to be used to calculate the filter (available: eqv_fc, invariants_deriv_fc, invariants_fc).
- Parameters
- ----------
- my_invariant_fc_operator : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,15 +230,15 @@ def invariant_fc_operator(self):
return self._invariant_fc_operator
@property
- def output_pin(self):
- """Allows to connect output_pin input to the operator.
+ def output_pin(self) -> Input:
+ r"""Allows to connect output_pin input to the operator.
- Output pin of the invariant operator. default
- = 0.
+ Output pin of the invariant operator. Default = 0.
- Parameters
- ----------
- my_output_pin : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,15 +251,15 @@ def output_pin(self):
return self._output_pin
@property
- def list_of_results(self):
- """Allows to connect list_of_results input to the operator.
+ def list_of_results(self) -> Input:
+ r"""Allows to connect list_of_results input to the operator.
- If no result is given, filter will be applied
- on stresses and strains
+        If no result is given, filter will be applied on stresses and strains.
- Parameters
- ----------
- my_list_of_results : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,15 +272,15 @@ def list_of_results(self):
return self._list_of_results
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- Threshold from which the operator will
- filter.
+ Threshold from which the operator will filter.
- Parameters
- ----------
- my_threshold : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -309,18 +311,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._workflow)
@property
- def workflow(self):
- """Allows to get workflow output of the operator
+ def workflow(self) -> Output:
+ r"""Allows to get workflow output of the operator
Returns
- ----------
- my_workflow : Workflow
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.filtering_max_over_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_workflow = op.outputs.workflow()
- """ # noqa: E501
+ """
return self._workflow
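
For context, a minimal usage sketch of the operator patched above, in the same doctest style as the generated examples; "eqv_fc" is taken from the documented list of available invariant operators, and the threshold value is purely illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.filtering_max_over_time()
>>> op.inputs.invariant_fc_operator.connect("eqv_fc")  # von Mises equivalent, per the pin documentation
>>> op.inputs.threshold.connect(1.0e8)                 # illustrative threshold value
>>> filtering_workflow = op.outputs.workflow()         # the assembled filtering Workflow
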
diff --git a/src/ansys/dpf/core/operators/filter/scoping_band_pass.py b/src/ansys/dpf/core/operators/filter/scoping_band_pass.py
index 7f5041899c9..8a2dc4d7952 100644
--- a/src/ansys/dpf/core/operators/filter/scoping_band_pass.py
+++ b/src/ansys/dpf/core/operators/filter/scoping_band_pass.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scoping_band_pass(Operator):
- """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to) the
- maximum threshold value in input.
+ r"""The band pass filter returns all the values above (but not equal to) the
+ minimum threshold value and below (but not equal to) the maximum
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- min_threshold : float or Field
- A minimum threshold scalar or a field
- containing one value is expected.
- max_threshold : float or Field, optional
- A maximum threshold scalar or a field
- containing one value is expected.
+ field: Field or FieldsContainer
+        Field or fields container with only one field is expected.
+ min_threshold: float or Field
+ A minimum threshold scalar or a field containing one value is expected.
+ max_threshold: float or Field, optional
+ A maximum threshold scalar or a field containing one value is expected.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -77,10 +79,11 @@ def __init__(
self.inputs.max_threshold.connect(max_threshold)
@staticmethod
- def _spec():
- description = """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to)
- the maximum threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The band pass filter returns all the values above (but not equal to) the
+minimum threshold value and below (but not equal to) the maximum
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,22 +91,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+            document=r"""Field or fields container with only one field is expected.""",
),
1: PinSpecification(
name="min_threshold",
type_names=["double", "field"],
optional=False,
- document="""A minimum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A minimum threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="max_threshold",
type_names=["double", "field"],
optional=True,
- document="""A maximum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A maximum threshold scalar or a field containing one value is expected.""",
),
},
map_output_pin_spec={
@@ -111,14 +111,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -127,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::scoping::band_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScopingBandPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScopingBandPass
+ inputs:
+ An instance of InputsScopingBandPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScopingBandPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScopingBandPass
+ outputs:
+ An instance of OutputsScopingBandPass.
"""
return super().outputs
@@ -180,15 +187,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._max_threshold)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+        Field or fields container with only one field is expected.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -201,15 +208,15 @@ def field(self):
return self._field
@property
- def min_threshold(self):
- """Allows to connect min_threshold input to the operator.
+ def min_threshold(self) -> Input:
+ r"""Allows to connect min_threshold input to the operator.
- A minimum threshold scalar or a field
- containing one value is expected.
+ A minimum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_min_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,15 +229,15 @@ def min_threshold(self):
return self._min_threshold
@property
- def max_threshold(self):
- """Allows to connect max_threshold input to the operator.
+ def max_threshold(self) -> Input:
+ r"""Allows to connect max_threshold input to the operator.
- A maximum threshold scalar or a field
- containing one value is expected.
+ A maximum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_max_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,18 +268,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_band_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
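
A minimal sketch of scoping_band_pass as documented above, following the doctest conventions of these files; dpf.Field() is only a placeholder for a real one-field input, and the bounds are illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_band_pass()
>>> op.inputs.field.connect(dpf.Field())   # placeholder; any scalar Field or one-field FieldsContainer
>>> op.inputs.min_threshold.connect(0.5)   # keep values strictly above 0.5
>>> op.inputs.max_threshold.connect(2.0)   # and strictly below 2.0
>>> ids_in_band = op.outputs.scoping()
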
diff --git a/src/ansys/dpf/core/operators/filter/scoping_high_pass.py b/src/ansys/dpf/core/operators/filter/scoping_high_pass.py
index cd49a110956..d4de3028624 100644
--- a/src/ansys/dpf/core/operators/filter/scoping_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/scoping_high_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scoping_high_pass(Operator):
- """The high pass filter returns all the values above (but not equal to)
- the threshold value in input.
+ r"""The high pass filter returns all the values above (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+        Field or fields container with only one field is expected.
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -71,9 +71,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,24 +82,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+            document=r"""Field or fields container with only one field is expected.""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -106,14 +102,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -122,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::scoping::high_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScopingHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScopingHighPass
+ inputs:
+ An instance of InputsScopingHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScopingHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScopingHighPass
+ outputs:
+ An instance of OutputsScopingHighPass.
"""
return super().outputs
@@ -175,15 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+        Field or fields container with only one field is expected.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,15 +199,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,17 +220,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
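
A sketch of scoping_high_pass with the both pin enabled; the generated Outputs wrapper only exposes pin 0, so reading the complement through Operator.get_output on pin 1 is an assumption based on the pin documentation:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_high_pass()
>>> op.inputs.field.connect(dpf.Field())  # placeholder input
>>> op.inputs.threshold.connect(1.0)      # keep values strictly above 1.0
>>> op.inputs.both.connect(True)          # also compute the complement
>>> kept = op.outputs.scoping()
>>> dropped = op.get_output(1, dpf.types.scoping)  # assumed access to output pin 1
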
diff --git a/src/ansys/dpf/core/operators/filter/scoping_low_pass.py b/src/ansys/dpf/core/operators/filter/scoping_low_pass.py
index 9661d84d0b6..8e6f6e4148b 100644
--- a/src/ansys/dpf/core/operators/filter/scoping_low_pass.py
+++ b/src/ansys/dpf/core/operators/filter/scoping_low_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scoping_low_pass(Operator):
- """The low pass filter returns all the values below (but not equal to)
- the threshold value in input.
+ r"""The low pass filter returns all the values below (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+        Field or fields container with only one field is expected.
+ threshold: float or Field
+        A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -71,9 +71,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The low pass filter returns all the values below (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The low pass filter returns all the values below (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,24 +82,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+            document=r"""Field or fields container with only one field is expected.""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected""",
+            document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -106,14 +102,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -122,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::scoping::low_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScopingLowPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScopingLowPass
+ inputs:
+ An instance of InputsScopingLowPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScopingLowPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScopingLowPass
+ outputs:
+ An instance of OutputsScopingLowPass.
"""
return super().outputs
@@ -175,15 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+        Field or fields container with only one field is expected.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,15 +199,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected
+        A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,17 +220,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_low_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
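
The constructor also accepts the pins as keyword arguments, so the same filter can be written in one call; this sketch assumes an already-built field, with dpf.Field() standing in as a placeholder:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()  # placeholder for a field with real data
>>> op = dpf.operators.filter.scoping_low_pass(field=my_field, threshold=1.0)
>>> kept_ids = op.outputs.scoping()  # ids whose values are strictly below 1.0
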
diff --git a/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py
index 02c7921a4d4..1bf0b616e02 100644
--- a/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/scoping_signed_high_pass.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scoping_signed_high_pass(Operator):
- """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input.
+ r"""The high pass filter returns all the values above, or equal, in absolute
+ value to the threshold value in input.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ field: Field or FieldsContainer
+        Field or fields container with only one field is expected.
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -73,9 +73,10 @@ def __init__(self, field=None, threshold=None, both=None, config=None, server=No
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above, or equal, in absolute
+value to the threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,24 +84,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+            document=r"""Field or fields container with only one field is expected.""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -108,14 +104,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,31 +120,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::scoping::signed_high_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsScopingSignedHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScopingSignedHighPass
+ inputs:
+ An instance of InputsScopingSignedHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScopingSignedHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScopingSignedHighPass
+ outputs:
+ An instance of OutputsScopingSignedHighPass.
"""
return super().outputs
@@ -181,15 +184,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+        Field or fields container with only one field is expected.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,15 +205,15 @@ def field(self):
return self._field
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,17 +226,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -264,18 +265,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_signed_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
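
A sketch highlighting how the signed variant differs from scoping_high_pass: the comparison is on the absolute value and the threshold itself is kept (>=), so negative entries can pass the filter; the values are illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.scoping_signed_high_pass()
>>> op.inputs.field.connect(dpf.Field())  # placeholder; e.g. values -3.0, -1.0, 2.5
>>> op.inputs.threshold.connect(2.0)      # keeps entities where abs(value) >= 2.0
>>> kept = op.outputs.scoping()
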
diff --git a/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py
index 80a4abfac90..b655d9cebb4 100644
--- a/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timefreq_band_pass.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timefreq_band_pass(Operator):
- """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to) the
- maximum threshold value in input.
+ r"""The band pass filter returns all the values above (but not equal to) the
+ minimum threshold value and below (but not equal to) the maximum
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- min_threshold : float or Field
- A minimum threshold scalar or a field
- containing one value is expected.
- max_threshold : float or Field, optional
- A maximum threshold scalar or a field
- containing one value is expected.
+ time_freq_support: TimeFreqSupport
+ min_threshold: float or Field
+ A minimum threshold scalar or a field containing one value is expected.
+ max_threshold: float or Field, optional
+ A maximum threshold scalar or a field containing one value is expected.
Returns
-------
- time_freq_support : TimeFreqSupport
- scoping : Scoping
+ time_freq_support: TimeFreqSupport
+ scoping: Scoping
Examples
--------
@@ -77,10 +80,11 @@ def __init__(
self.inputs.max_threshold.connect(max_threshold)
@staticmethod
- def _spec():
- description = """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to)
- the maximum threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The band pass filter returns all the values above (but not equal to) the
+minimum threshold value and below (but not equal to) the maximum
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,21 +92,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="min_threshold",
type_names=["double", "field"],
optional=False,
- document="""A minimum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A minimum threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="max_threshold",
type_names=["double", "field"],
optional=True,
- document="""A maximum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A maximum threshold scalar or a field containing one value is expected.""",
),
},
map_output_pin_spec={
@@ -110,20 +112,20 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +134,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::timefreq::band_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimefreqBandPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimefreqBandPass
+ inputs:
+ An instance of InputsTimefreqBandPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimefreqBandPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimefreqBandPass
+ outputs:
+ An instance of OutputsTimefreqBandPass.
"""
return super().outputs
@@ -187,12 +196,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._max_threshold)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,15 +215,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def min_threshold(self):
- """Allows to connect min_threshold input to the operator.
+ def min_threshold(self) -> Input:
+ r"""Allows to connect min_threshold input to the operator.
- A minimum threshold scalar or a field
- containing one value is expected.
+ A minimum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_min_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,15 +236,15 @@ def min_threshold(self):
return self._min_threshold
@property
- def max_threshold(self):
- """Allows to connect max_threshold input to the operator.
+ def max_threshold(self) -> Input:
+ r"""Allows to connect max_threshold input to the operator.
- A maximum threshold scalar or a field
- containing one value is expected.
+ A maximum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_max_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,35 +280,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def time_freq_support(self):
- """Allows to get time_freq_support output of the operator
+ def time_freq_support(self) -> Output:
+ r"""Allows to get time_freq_support output of the operator
Returns
- ----------
- my_time_freq_support : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_band_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_support = op.outputs.time_freq_support()
- """ # noqa: E501
+ """
return self._time_freq_support
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_band_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
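
A sketch of the time/frequency variant, which returns both a filtered TimeFreqSupport and the matching Scoping; the empty dpf.TimeFreqSupport() is a placeholder (in practice it would typically come from model.metadata.time_freq_support), and the bounds are illustrative:

>>> from ansys.dpf import core as dpf
>>> support = dpf.TimeFreqSupport()  # placeholder; typically model.metadata.time_freq_support
>>> op = dpf.operators.filter.timefreq_band_pass()
>>> op.inputs.time_freq_support.connect(support)
>>> op.inputs.min_threshold.connect(0.01)
>>> op.inputs.max_threshold.connect(0.10)
>>> filtered_support = op.outputs.time_freq_support()
>>> filtered_ids = op.outputs.scoping()
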
diff --git a/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py
index 0a43b8dfe58..6c761e490b8 100644
--- a/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timefreq_high_pass.py
@@ -4,33 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timefreq_high_pass(Operator):
- """The high pass filter returns all the values above (but not equal to)
- the threshold value in input.
+ r"""The high pass filter returns all the values above (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- time_freq_support : TimeFreqSupport
- scoping : Scoping
+ time_freq_support: TimeFreqSupport
+ scoping: Scoping
Examples
--------
@@ -78,9 +79,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +90,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -112,20 +110,20 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -134,29 +132,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::timefreq::high_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimefreqHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimefreqHighPass
+ inputs:
+ An instance of InputsTimefreqHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimefreqHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimefreqHighPass
+ outputs:
+ An instance of OutputsTimefreqHighPass.
"""
return super().outputs
@@ -189,12 +194,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,15 +213,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,17 +234,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,35 +278,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def time_freq_support(self):
- """Allows to get time_freq_support output of the operator
+ def time_freq_support(self) -> Output:
+ r"""Allows to get time_freq_support output of the operator
Returns
- ----------
- my_time_freq_support : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_support = op.outputs.time_freq_support()
- """ # noqa: E501
+ """
return self._time_freq_support
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
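
Since the threshold pin accepts either a double or a Field, a one-value field can be connected instead of a scalar; building it with Field.append is an assumption about the most convenient helper, and the value is illustrative:

>>> from ansys.dpf import core as dpf
>>> thr = dpf.Field()
>>> thr.append([0.05], 1)  # assumed helper: one scalar value for entity id 1
>>> op = dpf.operators.filter.timefreq_high_pass()
>>> op.inputs.time_freq_support.connect(dpf.TimeFreqSupport())  # placeholder support
>>> op.inputs.threshold.connect(thr)
>>> filtered_ids = op.outputs.scoping()
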
diff --git a/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py
index 290b62a7f3e..3a5d296dbbf 100644
--- a/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timefreq_low_pass.py
@@ -4,33 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timefreq_low_pass(Operator):
- """The low pass filter returns all the values below (but not equal to)
- the threshold value in input.
+ r"""The low pass filter returns all the values below (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+        A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- time_freq_support : TimeFreqSupport
- scoping : Scoping
+ time_freq_support: TimeFreqSupport
+ scoping: Scoping
Examples
--------
@@ -78,9 +79,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The low pass filter returns all the values below (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The low pass filter returns all the values below (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +90,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected""",
+            document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -112,20 +110,20 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -134,29 +132,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="core::timefreq::low_pass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimefreqLowPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimefreqLowPass
+ inputs:
+ An instance of InputsTimefreqLowPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimefreqLowPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimefreqLowPass
+ outputs:
+ An instance of OutputsTimefreqLowPass.
"""
return super().outputs
@@ -189,12 +194,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,15 +213,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected
+        A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,17 +234,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -272,35 +276,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def time_freq_support(self):
- """Allows to get time_freq_support output of the operator
+ def time_freq_support(self) -> Output:
+ r"""Allows to get time_freq_support output of the operator
Returns
- ----------
- my_time_freq_support : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_low_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_support = op.outputs.time_freq_support()
- """ # noqa: E501
+ """
return self._time_freq_support
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_low_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
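
The keyword form mirrors the pin names here as well, so the low-pass filter can be built in one call; the placeholder support and the 0.2 threshold are illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_low_pass(
...     time_freq_support=dpf.TimeFreqSupport(),  # placeholder support
...     threshold=0.2,
... )
>>> remaining_ids = op.outputs.scoping()
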
diff --git a/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py
index 5ed640592cc..fca7c739363 100644
--- a/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timefreq_signed_high_pass.py
@@ -4,33 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timefreq_signed_high_pass(Operator):
- """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input.
+ r"""The high pass filter returns all the values above, or equal, in absolute
+ value to the threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- time_freq_support : TimeFreqSupport
- scoping : Scoping
+ time_freq_support: TimeFreqSupport
+ scoping: Scoping
Examples
--------
@@ -80,9 +81,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above, or equal, in absolute
+value to the threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -90,23 +92,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -114,20 +112,20 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -136,31 +134,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::timefreq::signed_high_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimefreqSignedHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimefreqSignedHighPass
+ inputs:
+ An instance of InputsTimefreqSignedHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimefreqSignedHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimefreqSignedHighPass
+ outputs:
+ An instance of OutputsTimefreqSignedHighPass.
"""
return super().outputs
@@ -195,12 +200,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -213,15 +219,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -234,17 +240,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -280,35 +284,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def time_freq_support(self):
- """Allows to get time_freq_support output of the operator
+ def time_freq_support(self) -> Output:
+ r"""Allows to get time_freq_support output of the operator
Returns
- ----------
- my_time_freq_support : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_signed_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_support = op.outputs.time_freq_support()
- """ # noqa: E501
+ """
return self._time_freq_support
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timefreq_signed_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
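A minimal usage sketch for the timefreq signed high pass operator above (not part of the patch): the operator, pin, and output names are confirmed by the doctests in this file, while `dpf.Model(...)` and `model.metadata.time_freq_support` are assumed ways to obtain a `TimeFreqSupport` and do not appear in this diff.

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/result.rst")              # assumed source of a TimeFreqSupport
    tfs = model.metadata.time_freq_support                 # assumed attribute, not shown in this diff

    op = dpf.operators.filter.timefreq_signed_high_pass()  # confirmed by the doctests above
    op.inputs.time_freq_support.connect(tfs)
    op.inputs.threshold.connect(0.05)                       # threshold scalar, as documented above
    filtered_tfs = op.outputs.time_freq_support()           # filtered support (output pin 0)
    kept_ids = op.outputs.scoping()                         # matching scoping (output pin 1)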
diff --git a/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py
index 57d592be89a..572145a3e4d 100644
--- a/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timescoping_band_pass.py
@@ -4,31 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timescoping_band_pass(Operator):
- """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to) the
- maximum threshold value in input.
+ r"""The band pass filter returns all the values above (but not equal to) the
+ minimum threshold value and below (but not equal to) the maximum
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- min_threshold : float or Field
- A minimum threshold scalar or a field
- containing one value is expected.
- max_threshold : float or Field, optional
- A maximum threshold scalar or a field
- containing one value is expected.
+ time_freq_support: TimeFreqSupport
+ min_threshold: float or Field
+ A minimum threshold scalar or a field containing one value is expected.
+ max_threshold: float or Field, optional
+ A maximum threshold scalar or a field containing one value is expected.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -77,10 +80,11 @@ def __init__(
self.inputs.max_threshold.connect(max_threshold)
@staticmethod
- def _spec():
- description = """The band pass filter returns all the values above (but not equal to)
- the minimum threshold value and below (but not equal to)
- the maximum threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The band pass filter returns all the values above (but not equal to) the
+minimum threshold value and below (but not equal to) the maximum
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,21 +92,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="min_threshold",
type_names=["double", "field"],
optional=False,
- document="""A minimum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A minimum threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="max_threshold",
type_names=["double", "field"],
optional=True,
- document="""A maximum threshold scalar or a field
- containing one value is expected.""",
+ document=r"""A maximum threshold scalar or a field containing one value is expected.""",
),
},
map_output_pin_spec={
@@ -110,14 +112,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,31 +128,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::timescoping::band_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimescopingBandPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimescopingBandPass
+ inputs:
+ An instance of InputsTimescopingBandPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimescopingBandPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimescopingBandPass
+ outputs:
+ An instance of OutputsTimescopingBandPass.
"""
return super().outputs
@@ -187,12 +196,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._max_threshold)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,15 +215,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def min_threshold(self):
- """Allows to connect min_threshold input to the operator.
+ def min_threshold(self) -> Input:
+ r"""Allows to connect min_threshold input to the operator.
- A minimum threshold scalar or a field
- containing one value is expected.
+ A minimum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_min_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,15 +236,15 @@ def min_threshold(self):
return self._min_threshold
@property
- def max_threshold(self):
- """Allows to connect max_threshold input to the operator.
+ def max_threshold(self) -> Input:
+ r"""Allows to connect max_threshold input to the operator.
- A maximum threshold scalar or a field
- containing one value is expected.
+ A maximum threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_max_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +275,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timescoping_band_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
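A minimal usage sketch for the timescoping band pass operator above (not part of the patch): only the operator instantiation, `inputs.*.connect`, and `outputs.scoping()` calls are confirmed by this file; obtaining the `TimeFreqSupport` through `dpf.Model` is an assumption.

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/result.rst")            # assumed source of a TimeFreqSupport
    tfs = model.metadata.time_freq_support               # assumed attribute, not shown in this diff

    op = dpf.operators.filter.timescoping_band_pass()    # confirmed by the doctests above
    op.inputs.time_freq_support.connect(tfs)
    op.inputs.min_threshold.connect(0.01)                 # keep values strictly above 0.01
    op.inputs.max_threshold.connect(0.1)                  # and strictly below 0.1
    time_scoping = op.outputs.scoping()                   # Scoping of the retained time steps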
diff --git a/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py
index ca0e3d3e7b6..ed1c7b5725d 100644
--- a/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timescoping_high_pass.py
@@ -4,32 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timescoping_high_pass(Operator):
- """The high pass filter returns all the values above (but not equal to)
- the threshold value in input.
+ r"""The high pass filter returns all the values above (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -78,9 +79,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +90,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -112,14 +110,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -128,31 +126,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::timescoping::high_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimescopingHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimescopingHighPass
+ inputs:
+ An instance of InputsTimescopingHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimescopingHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimescopingHighPass
+ outputs:
+ An instance of OutputsTimescopingHighPass.
"""
return super().outputs
@@ -185,12 +190,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,15 +209,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,17 +230,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +269,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timescoping_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
diff --git a/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py
index 2f973f77cb5..0851e2a9c5d 100644
--- a/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timescoping_low_pass.py
@@ -4,32 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timescoping_low_pass(Operator):
- """The low pass filter returns all the values below (but not equal to)
- the threshold value in input.
+ r"""The low pass filter returns all the values below (but not equal to) the
+ threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -78,9 +79,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The low pass filter returns all the values below (but not equal to)
- the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The low pass filter returns all the values below (but not equal to) the
+threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +90,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected""",
+ document=r"""a threshold scalar or a field containing one value is expected""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -112,14 +110,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -128,31 +126,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::timescoping::low_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimescopingLowPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimescopingLowPass
+ inputs:
+ An instance of InputsTimescopingLowPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimescopingLowPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimescopingLowPass
+ outputs:
+ An instance of OutputsTimescopingLowPass.
"""
return super().outputs
@@ -185,12 +190,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,15 +209,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,17 +230,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +269,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timescoping_low_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
diff --git a/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py b/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py
index b11d46da0c4..fdc1a3fae3e 100644
--- a/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py
+++ b/src/ansys/dpf/core/operators/filter/timescoping_signed_high_pass.py
@@ -4,32 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class timescoping_signed_high_pass(Operator):
- """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input.
+ r"""The high pass filter returns all the values above, or equal, in absolute
+ value to the threshold value in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- threshold : float or Field
- A threshold scalar or a field containing one
- value is expected.
- both : bool, optional
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ time_freq_support: TimeFreqSupport
+ threshold: float or Field
+ A threshold scalar or a field containing one value is expected.
+ both: bool, optional
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -78,9 +79,10 @@ def __init__(
self.inputs.both.connect(both)
@staticmethod
- def _spec():
- description = """The high pass filter returns all the values above, or equal, in
- absolute value to the threshold value in input."""
+ def _spec() -> Specification:
+ description = r"""The high pass filter returns all the values above, or equal, in absolute
+value to the threshold value in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +90,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double", "field"],
optional=False,
- document="""A threshold scalar or a field containing one
- value is expected.""",
+ document=r"""A threshold scalar or a field containing one value is expected.""",
),
2: PinSpecification(
name="both",
type_names=["bool"],
optional=True,
- document="""The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.""",
+ document=r"""The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.""",
),
},
map_output_pin_spec={
@@ -112,14 +110,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -128,31 +126,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="core::timescoping::signed_high_pass", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimescopingSignedHighPass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimescopingSignedHighPass
+ inputs:
+ An instance of InputsTimescopingSignedHighPass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimescopingSignedHighPass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimescopingSignedHighPass
+ outputs:
+ An instance of OutputsTimescopingSignedHighPass.
"""
return super().outputs
@@ -187,12 +192,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._both)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,15 +211,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- A threshold scalar or a field containing one
- value is expected.
+ A threshold scalar or a field containing one value is expected.
- Parameters
- ----------
- my_threshold : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,17 +232,15 @@ def threshold(self):
return self._threshold
@property
- def both(self):
- """Allows to connect both input to the operator.
+ def both(self) -> Input:
+ r"""Allows to connect both input to the operator.
- The default is false. if set to true, the
- complement of the filtered fields
- container is returned on output pin
- 1.
+ The default is false. If set to true, the complement of the filtered fields container is returned on output pin 1.
- Parameters
- ----------
- my_both : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -269,18 +273,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.filter.timescoping_signed_high_pass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
diff --git a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py
index bb2538f84ba..b07e4bbce43 100644
--- a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py
+++ b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cartesian_to_spherical(Operator):
- """Converts 3D field from cartesian coordinates to spherical coordinates.
+ r"""Converts 3D field from cartesian coordinates to spherical coordinates.
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -50,10 +55,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = (
- """Converts 3D field from cartesian coordinates to spherical coordinates."""
- )
+ def _spec() -> Specification:
+ description = r"""Converts 3D field from cartesian coordinates to spherical coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +65,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +73,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cartesian_to_spherical", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCartesianToSpherical:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCartesianToSpherical
+ inputs:
+ An instance of InputsCartesianToSpherical.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCartesianToSpherical:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCartesianToSpherical
+ outputs:
+ An instance of OutputsCartesianToSpherical.
"""
return super().outputs
@@ -130,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.cartesian_to_spherical()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
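A minimal usage sketch for the cartesian-to-spherical conversion above (not part of the patch): the operator and pin names are confirmed by the doctests in this file; `fields_factory.create_3d_vector_field` and `Field.append` are assumed helpers used only to build a small input field.

    from ansys.dpf import core as dpf

    coords = dpf.fields_factory.create_3d_vector_field(1)  # assumed helper: one nodal vector entity
    coords.append([1.0, 1.0, 0.0], 1)                       # (x, y, z) for entity id 1

    op = dpf.operators.geo.cartesian_to_spherical()         # confirmed by the doctests above
    op.inputs.field.connect(coords)
    spherical = op.outputs.field()                          # same entities, spherical components
    print(spherical.data)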
diff --git a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py
index 89132b26eda..13aecbb6a31 100644
--- a/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py
+++ b/src/ansys/dpf/core/operators/geo/cartesian_to_spherical_fc.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cartesian_to_spherical_fc(Operator):
- """Converts 3D field from cartesian coordinates to spherical coordinates.
+ r"""Converts 3D field from cartesian coordinates to spherical coordinates.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,10 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = (
- """Converts 3D field from cartesian coordinates to spherical coordinates."""
- )
+ def _spec() -> Specification:
+ description = r"""Converts 3D field from cartesian coordinates to spherical coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cartesian_to_spherical_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCartesianToSphericalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCartesianToSphericalFc
+ inputs:
+ An instance of InputsCartesianToSphericalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCartesianToSphericalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCartesianToSphericalFc
+ outputs:
+ An instance of OutputsCartesianToSphericalFc.
"""
return super().outputs
@@ -132,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +182,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.cartesian_to_spherical_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py b/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py
index c296ea0467e..9a57b2b26d2 100644
--- a/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py
+++ b/src/ansys/dpf/core/operators/geo/element_nodal_contribution.py
@@ -4,35 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_nodal_contribution(Operator):
- """Compute the fraction of the element measure attributed to each node of
- each element (fraction of the volume for 3D elements, fraction of
- the area for 2D elements or fraction of the length for 1D
- elements). It is computed by taking the integral of the shape
- function associated to each node within each element.
+ r"""Compute the fraction of the element measure attributed to each node of
+ each element (fraction of the volume for 3D elements, fraction of the
+ area for 2D elements or fraction of the length for 1D elements). It is
+ computed by taking the integral of the shape function associated to each
+ node within each element.
+
Parameters
----------
- mesh : MeshedRegion
- scoping : Scoping, optional
- Integrate the input field over a specific
- scoping.
- volume_fraction : bool, optional
- If true, returns influence volume, area or
- length. if false, the values are
- normalized with the element volume,
- area or length. default: true.
+ mesh: MeshedRegion
+ scoping: Scoping, optional
+ Integrate the input field over a specific scoping.
+ volume_fraction: bool, optional
+ If true, returns influence volume, area or length. If false, the values are normalized with the element volume, area or length. Default: true.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -76,13 +77,13 @@ def __init__(
self.inputs.volume_fraction.connect(volume_fraction)
@staticmethod
- def _spec():
- description = """Compute the fraction of the element measure attributed to each node of
- each element (fraction of the volume for 3D elements,
- fraction of the area for 2D elements or fraction of the
- length for 1D elements). It is computed by taking the
- integral of the shape function associated to each node
- within each element."""
+ def _spec() -> Specification:
+ description = r"""Compute the fraction of the element measure attributed to each node of
+each element (fraction of the volume for 3D elements, fraction of the
+area for 2D elements or fraction of the length for 1D elements). It is
+computed by taking the integral of the shape function associated to each
+node within each element.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -90,23 +91,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Integrate the input field over a specific
- scoping.""",
+ document=r"""Integrate the input field over a specific scoping.""",
),
2: PinSpecification(
name="volume_fraction",
type_names=["bool"],
optional=True,
- document="""If true, returns influence volume, area or
- length. if false, the values are
- normalized with the element volume,
- area or length. default: true.""",
+ document=r"""If true, returns influence volume, area or length. If false, the values are normalized with the element volume, area or length. Default: true.""",
),
},
map_output_pin_spec={
@@ -114,14 +111,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -130,31 +127,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="element::nodal_contribution", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementNodalContribution:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementNodalContribution
+ inputs:
+ An instance of InputsElementNodalContribution.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementNodalContribution:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementNodalContribution
+ outputs:
+ An instance of OutputsElementNodalContribution.
"""
return super().outputs
@@ -189,12 +193,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._volume_fraction)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,15 +212,15 @@ def mesh(self):
return self._mesh
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Integrate the input field over a specific
- scoping.
+ Integrate the input field over a specific scoping.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,17 +233,15 @@ def scoping(self):
return self._scoping
@property
- def volume_fraction(self):
- """Allows to connect volume_fraction input to the operator.
+ def volume_fraction(self) -> Input:
+ r"""Allows to connect volume_fraction input to the operator.
- If true, returns influence volume, area or
- length. if false, the values are
- normalized with the element volume,
- area or length. default: true.
+ If true, returns influence volume, area or length. If false, the values are normalized with the element volume, area or length. Default: true.
- Parameters
- ----------
- my_volume_fraction : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -269,18 +272,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.element_nodal_contribution()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
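A minimal usage sketch for the element nodal contribution operator above (not part of the patch): operator and pin names come from this file; `model.metadata.meshed_region` is an assumed way to obtain a `MeshedRegion`.

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/result.rst")              # assumed source of a MeshedRegion
    mesh = model.metadata.meshed_region                    # assumed attribute, not shown in this diff

    op = dpf.operators.geo.element_nodal_contribution()    # confirmed by the doctests above
    op.inputs.mesh.connect(mesh)
    op.inputs.volume_fraction.connect(False)                # per the doc above: normalize by each element's measure
    fractions = op.outputs.field()                          # one contribution per node of each element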
diff --git a/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py b/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py
index 9036263a9d4..6f6fda6143e 100644
--- a/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py
+++ b/src/ansys/dpf/core/operators/geo/elements_facets_surfaces_over_time.py
@@ -4,33 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elements_facets_surfaces_over_time(Operator):
- """Calculates for a mesh, the surface of each element's facet over time
- for each specified time step. The output is a new mesh made with
- only surface elements.
+ r"""Calculates for a mesh, the surface of each element’s facet over time for
+ each specified time step. The output is a new mesh made with only
+ surface elements.
+
Parameters
----------
- scoping : Scoping, optional
- displacement : FieldsContainer, optional
+ scoping: Scoping, optional
+ displacement: FieldsContainer, optional
Displacement field's container.
- mesh : MeshedRegion, optional
- Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.
+ mesh: MeshedRegion, optional
+ Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Surfaces field.
- mesh : MeshedRegion
+ mesh: MeshedRegion
Mesh made of surface elements only.
Examples
@@ -74,10 +77,11 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Calculates for a mesh, the surface of each element's facet over time
- for each specified time step. The output is a new mesh
- made with only surface elements."""
+ def _spec() -> Specification:
+ description = r"""Calculates for a mesh, the surface of each element’s facet over time for
+each specified time step. The output is a new mesh made with only
+surface elements.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,21 +89,19 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="displacement",
type_names=["fields_container"],
optional=True,
- document="""Displacement field's container.""",
+ document=r"""Displacement field's container.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.""",
+ document=r"""Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.""",
),
},
map_output_pin_spec={
@@ -107,20 +109,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Surfaces field.""",
+ document=r"""Surfaces field.""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Mesh made of surface elements only.""",
+ document=r"""Mesh made of surface elements only.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -129,29 +131,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="surfaces_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementsFacetsSurfacesOverTime:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementsFacetsSurfacesOverTime
+ inputs:
+ An instance of InputsElementsFacetsSurfacesOverTime.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementsFacetsSurfacesOverTime:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementsFacetsSurfacesOverTime
+ outputs:
+ An instance of OutputsElementsFacetsSurfacesOverTime.
"""
return super().outputs
@@ -188,12 +197,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,14 +216,15 @@ def scoping(self):
return self._scoping
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
Displacement field's container.
- Parameters
- ----------
- my_displacement : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,16 +237,15 @@ def displacement(self):
return self._displacement
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.
+ Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -273,35 +283,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ Surfaces field.
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.elements_facets_surfaces_over_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
+
+ Mesh made of surface elements only.
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.elements_facets_surfaces_over_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
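A minimal usage sketch for the facet-surfaces-over-time operator above (not part of the patch): it uses only pins confirmed in this file and connects the mesh directly, since the doc above states the mesh is required when no displacement container provides it; `model.metadata.meshed_region` is an assumption.

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/result.rst")                       # assumed source of a MeshedRegion
    op = dpf.operators.geo.elements_facets_surfaces_over_time()     # confirmed by the doctests above
    op.inputs.mesh.connect(model.metadata.meshed_region)            # assumed attribute, not shown in this diff
    surfaces = op.outputs.fields_container()                        # surfaces field (output pin 0)
    surface_mesh = op.outputs.mesh()                                 # surface-only mesh (output pin 1)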
diff --git a/src/ansys/dpf/core/operators/geo/elements_volume.py b/src/ansys/dpf/core/operators/geo/elements_volume.py
index a077e7b8220..359a6abb771 100644
--- a/src/ansys/dpf/core/operators/geo/elements_volume.py
+++ b/src/ansys/dpf/core/operators/geo/elements_volume.py
@@ -4,30 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elements_volume(Operator):
- """Compute the measure of the Elements (volume for 3D elements, surface
- for 2D elements or length for 1D elements) using default shape
- functions, except for polyhedrons.
+ r"""Compute the measure of the Elements (volume for 3D elements, surface for
+ 2D elements or length for 1D elements) using default shape functions,
+ except for polyhedrons.
+
Parameters
----------
- mesh : MeshedRegion
- mesh_scoping : Scoping
- If not provided, the measure of all elements
- for the mesh is computed. if
- provided, the scoping needs to have
- "elemental" location.
+ mesh: MeshedRegion
+ mesh_scoping: Scoping
+ If not provided, the measure of all elements for the mesh is computed. If provided, the Scoping needs to have "Elemental" location.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -62,10 +64,11 @@ def __init__(self, mesh=None, mesh_scoping=None, config=None, server=None):
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Compute the measure of the Elements (volume for 3D elements, surface
- for 2D elements or length for 1D elements) using default
- shape functions, except for polyhedrons."""
+ def _spec() -> Specification:
+ description = r"""Compute the measure of the Elements (volume for 3D elements, surface for
+2D elements or length for 1D elements) using default shape functions,
+except for polyhedrons.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -73,16 +76,13 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""If not provided, the measure of all elements
- for the mesh is computed. if
- provided, the scoping needs to have
- "elemental" location.""",
+ document=r"""If not provided, the measure of all elements for the mesh is computed. If provided, the Scoping needs to have "Elemental" location.""",
),
},
map_output_pin_spec={
@@ -90,14 +90,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -106,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="element::volume", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementsVolume:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementsVolume
+ inputs:
+ An instance of InputsElementsVolume.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementsVolume:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementsVolume
+ outputs:
+ An instance of OutputsElementsVolume.
"""
return super().outputs
@@ -155,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,17 +181,15 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- If not provided, the measure of all elements
- for the mesh is computed. if
- provided, the scoping needs to have
- "elemental" location.
+ If not provided, the measure of all elements for the mesh is computed. If provided, the Scoping needs to have "Elemental" location.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,18 +220,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.elements_volume()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
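A minimal usage sketch for the elements volume operator above (not part of the patch): it builds the "Elemental" scoping required by the doc above; `dpf.Scoping`, `dpf.locations.elemental`, and `model.metadata.meshed_region` are assumptions, while the operator and pin names are confirmed by this file.

    from ansys.dpf import core as dpf

    model = dpf.Model(r"path/to/result.rst")      # assumed source of a MeshedRegion
    mesh = model.metadata.meshed_region            # assumed attribute, not shown in this diff

    scoping = dpf.Scoping()                        # assumed class, used to restrict the computation
    scoping.location = dpf.locations.elemental     # "Elemental" location, as required above
    scoping.ids = [1, 2, 3]

    op = dpf.operators.geo.elements_volume()       # confirmed by the doctests above
    op.inputs.mesh.connect(mesh)
    op.inputs.mesh_scoping.connect(scoping)
    volumes = op.outputs.field()                   # one measure per scoped element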
diff --git a/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py b/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py
index 4f48b173962..b238cb12b84 100644
--- a/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py
+++ b/src/ansys/dpf/core/operators/geo/elements_volumes_over_time.py
@@ -4,32 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elements_volumes_over_time(Operator):
- """Calculates for a mesh, the volume of each element over time for each
+ r"""Calculates for a mesh, the volume of each element over time for each
specified time step.
+
Parameters
----------
- scoping : Scoping, optional
- displacement : FieldsContainer, optional
- Displacement field's container. must contain
- the mesh if mesh not specified in
- input.
- mesh : MeshedRegion, optional
- Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.
+ scoping: Scoping, optional
+ displacement: FieldsContainer, optional
+ Displacement field's container. Must contain the mesh if mesh not specified in input.
+ mesh: MeshedRegion, optional
+ Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,9 +72,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Calculates for a mesh, the volume of each element over time for each
- specified time step."""
+ def _spec() -> Specification:
+ description = r"""Calculates for a mesh, the volume of each element over time for each
+specified time step.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,23 +83,19 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="displacement",
type_names=["fields_container"],
optional=True,
- document="""Displacement field's container. must contain
- the mesh if mesh not specified in
- input.""",
+ document=r"""Displacement field's container. Must contain the mesh if mesh not specified in input.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.""",
+ document=r"""Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.""",
),
},
map_output_pin_spec={
@@ -105,14 +103,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -121,29 +119,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="volumes_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementsVolumesOverTime:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementsVolumesOverTime
+ inputs:
+ An instance of InputsElementsVolumesOverTime.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementsVolumesOverTime:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementsVolumesOverTime
+ outputs:
+ An instance of OutputsElementsVolumesOverTime.
"""
return super().outputs
@@ -178,12 +183,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,16 +202,15 @@ def scoping(self):
return self._scoping
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Displacement field's container. must contain
- the mesh if mesh not specified in
- input.
+ Displacement field's container. Must contain the mesh if mesh not specified in input.
- Parameters
- ----------
- my_displacement : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -218,16 +223,15 @@ def displacement(self):
return self._displacement
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh must be defined if the displacement
- field's container does not contain
- it, or if there is no displacement.
+ Mesh must be defined if the displacement field's container does not contain it, or if there is no displacement.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,18 +264,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.elements_volumes_over_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
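
A sketch of the over-time variant, assuming `my_displacement` is a FieldsContainer of displacements that carries the mesh (placeholder):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.elements_volumes_over_time()
>>> op.inputs.displacement.connect(my_displacement)    # must carry the mesh if the mesh pin is left empty
>>> volumes_over_time = op.outputs.fields_container()  # one volume field per requested time step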
diff --git a/src/ansys/dpf/core/operators/geo/faces_area.py b/src/ansys/dpf/core/operators/geo/faces_area.py
index 7bcc219aba0..456c2332375 100644
--- a/src/ansys/dpf/core/operators/geo/faces_area.py
+++ b/src/ansys/dpf/core/operators/geo/faces_area.py
@@ -4,30 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class faces_area(Operator):
- """Compute the measure of the Faces (surface for 2D faces of a 3D model
- or length for 1D faces of a 2D model) using default shape
- functions, except for polygons.
+ r"""Compute the measure of the Faces (surface for 2D faces of a 3D model or
+ length for 1D faces of a 2D model) using default shape functions, except
+ for polygons.
+
Parameters
----------
- mesh : MeshedRegion
- mesh_scoping : Scoping
- If not provided, the measure of all faces in
- the mesh is computed. if provided,
- the scoping needs to have "faces"
- location.
+ mesh: MeshedRegion
+ mesh_scoping: Scoping
+ If not provided, the measure of all Faces in the mesh is computed. If provided, the Scoping needs to have "Faces" location.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -62,10 +64,11 @@ def __init__(self, mesh=None, mesh_scoping=None, config=None, server=None):
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Compute the measure of the Faces (surface for 2D faces of a 3D model
- or length for 1D faces of a 2D model) using default shape
- functions, except for polygons."""
+ def _spec() -> Specification:
+ description = r"""Compute the measure of the Faces (surface for 2D faces of a 3D model or
+length for 1D faces of a 2D model) using default shape functions, except
+for polygons.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -73,16 +76,13 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""If not provided, the measure of all faces in
- the mesh is computed. if provided,
- the scoping needs to have "faces"
- location.""",
+ document=r"""If not provided, the measure of all Faces in the mesh is computed. If provided, the Scoping needs to have "Faces" location.""",
),
},
map_output_pin_spec={
@@ -90,14 +90,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -106,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="face::area", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFacesArea:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFacesArea
+ inputs:
+ An instance of InputsFacesArea.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFacesArea:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFacesArea
+ outputs:
+ An instance of OutputsFacesArea.
"""
return super().outputs
@@ -155,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,17 +181,15 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- If not provided, the measure of all faces in
- the mesh is computed. if provided,
- the scoping needs to have "faces"
- location.
+ If not provided, the measure of all Faces in the mesh is computed. If provided, the Scoping needs to have "Faces" location.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,18 +220,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.faces_area()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
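
The pins can also be fed through the constructor, as sketched here with a placeholder MeshedRegion `my_mesh` and a "Faces" Scoping `my_faces`:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.faces_area(mesh=my_mesh, mesh_scoping=my_faces)
>>> areas = op.outputs.field()   # measure of each face (surface for 2D faces, length for 1D faces)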
diff --git a/src/ansys/dpf/core/operators/geo/gauss_to_node.py b/src/ansys/dpf/core/operators/geo/gauss_to_node.py
index fe55a50d9db..6c7454ef9cf 100644
--- a/src/ansys/dpf/core/operators/geo/gauss_to_node.py
+++ b/src/ansys/dpf/core/operators/geo/gauss_to_node.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gauss_to_node(Operator):
- """Extrapolating results available at Gauss or quadrature points to nodal
- points for one field. The available elements are: Linear
- quadrangle, parabolic quadrangle, linear hexagonal, quadratic
- hexagonal, linear tetrahedral, and quadratic tetrahedral
+ r"""Extrapolating results available at Gauss or quadrature points to nodal
+ points for one field. The available elements are: Linear quadrangle,
+ parabolic quadrangle, linear hexagonal, quadratic hexagonal, linear
+ tetrahedral, and quadratic tetrahedral
+
Parameters
----------
- field : Field
- scoping : Scoping, optional
- Scoping to integrate on, if not provided, the
- one from input field is provided.
- mesh : MeshedRegion, optional
+ field: Field
+ scoping: Scoping, optional
+ Scoping to integrate on; if not provided, the one from the input field is used.
+ mesh: MeshedRegion, optional
Mesh to integrate on.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -68,12 +72,12 @@ def __init__(self, field=None, scoping=None, mesh=None, config=None, server=None
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extrapolating results available at Gauss or quadrature points to nodal
- points for one field. The available elements are: Linear
- quadrangle, parabolic quadrangle, linear hexagonal,
- quadratic hexagonal, linear tetrahedral, and quadratic
- tetrahedral"""
+ def _spec() -> Specification:
+ description = r"""Extrapolating results available at Gauss or quadrature points to nodal
+points for one field. The available elements are: Linear quadrangle,
+parabolic quadrangle, linear hexagonal, quadratic hexagonal, linear
+tetrahedral, and quadratic tetrahedral
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,20 +85,19 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Scoping to integrate on, if not provided, the
- one from input field is provided.""",
+ document=r"""Scoping to integrate on, if not provided, the one from input field is provided.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh to integrate on.""",
+ document=r"""Mesh to integrate on.""",
),
},
map_output_pin_spec={
@@ -102,14 +105,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -118,29 +121,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="gauss_to_node", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGaussToNode:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGaussToNode
+ inputs:
+ An instance of InputsGaussToNode.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGaussToNode:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGaussToNode
+ outputs:
+ An instance of OutputsGaussToNode.
"""
return super().outputs
@@ -171,12 +181,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,15 +200,15 @@ def field(self):
return self._field
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Scoping to integrate on, if not provided, the
- one from input field is provided.
+ Scoping to integrate on, if not provided, the one from input field is provided.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,14 +221,15 @@ def scoping(self):
return self._scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
Mesh to integrate on.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -248,18 +260,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.gauss_to_node()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
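
A sketch of the extrapolation, assuming `my_gauss_field` holds results at Gauss/quadrature points and `my_mesh` is the supporting MeshedRegion (both placeholders):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.gauss_to_node(field=my_gauss_field, mesh=my_mesh)
>>> nodal_field = op.outputs.field()   # same result extrapolated to the nodes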
diff --git a/src/ansys/dpf/core/operators/geo/integrate_over_elements.py b/src/ansys/dpf/core/operators/geo/integrate_over_elements.py
index 30b1da980c1..4375703c2f3 100644
--- a/src/ansys/dpf/core/operators/geo/integrate_over_elements.py
+++ b/src/ansys/dpf/core/operators/geo/integrate_over_elements.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class integrate_over_elements(Operator):
- """Integration of an input field over mesh.
+ r"""Integration of an input field over mesh.
+
Parameters
----------
- field : Field
- scoping : Scoping, optional
- Integrate the input field over a specific
- scoping.
- mesh : MeshedRegion, optional
- Mesh to integrate on. if not provided, the
- one from input field is employed.
+ field: Field
+ scoping: Scoping, optional
+ Integrate the input field over a specific scoping.
+ mesh: MeshedRegion, optional
+ Mesh to integrate on. If not provided, the one from the input field is employed.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -66,8 +69,9 @@ def __init__(self, field=None, scoping=None, mesh=None, config=None, server=None
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Integration of an input field over mesh."""
+ def _spec() -> Specification:
+ description = r"""Integration of an input field over mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -75,21 +79,19 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Integrate the input field over a specific
- scoping.""",
+ document=r"""Integrate the input field over a specific scoping.""",
),
2: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh to integrate on. if not provided, the
- one from input field is employed.""",
+ document=r"""Mesh to integrate on. If not provided, the one from input field is employed.""",
),
},
map_output_pin_spec={
@@ -97,14 +99,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -113,29 +115,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="element::integrate", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIntegrateOverElements:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIntegrateOverElements
+ inputs:
+ An instance of InputsIntegrateOverElements.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIntegrateOverElements:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIntegrateOverElements
+ outputs:
+ An instance of OutputsIntegrateOverElements.
"""
return super().outputs
@@ -166,12 +175,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,15 +194,15 @@ def field(self):
return self._field
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Integrate the input field over a specific
- scoping.
+ Integrate the input field over a specific scoping.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,15 +215,15 @@ def scoping(self):
return self._scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh to integrate on. if not provided, the
- one from input field is employed.
+ Mesh to integrate on. If not provided, the one from the input field is employed.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -244,18 +254,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.integrate_over_elements()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
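
A sketch of an integration restricted to a scoping, with placeholder inputs `my_field` and `my_scoping`:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.integrate_over_elements(field=my_field, scoping=my_scoping)
>>> integral = op.outputs.field()   # integration result over the scoped elements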
diff --git a/src/ansys/dpf/core/operators/geo/normals.py b/src/ansys/dpf/core/operators/geo/normals.py
index b60b06c1298..51ca9aa5449 100644
--- a/src/ansys/dpf/core/operators/geo/normals.py
+++ b/src/ansys/dpf/core/operators/geo/normals.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class normals(Operator):
- """compute the normals at the given nodes or element scoping based on the
- given mesh (first version, the element normal is only handled on
- the shell elements)
+ r"""compute the normals at the given nodes or element scoping based on the
+ given mesh (first version, the element normal is only handled on the
+ shell elements)
+
Parameters
----------
- mesh : MeshedRegion, optional
- mesh_scoping : Scoping, optional
- field : Field, optional
+ mesh: MeshedRegion, optional
+ mesh_scoping: Scoping, optional
+ field: Field, optional
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -66,10 +71,11 @@ def __init__(
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """compute the normals at the given nodes or element scoping based on the
- given mesh (first version, the element normal is only
- handled on the shell elements)"""
+ def _spec() -> Specification:
+ description = r"""compute the normals at the given nodes or element scoping based on the
+given mesh (first version, the element normal is only handled on the
+shell elements)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -77,19 +83,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="field",
type_names=["field"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -97,14 +103,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -113,29 +119,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="normals_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNormals:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNormals
+ inputs:
+ An instance of InputsNormals.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNormals:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNormals
+ outputs:
+ An instance of OutputsNormals.
"""
return super().outputs
@@ -166,12 +179,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,12 +198,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,12 +217,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,18 +254,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.normals()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
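
A sketch for shell-element normals, assuming `my_mesh` and `my_scoping` are placeholders obtained elsewhere:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.normals()
>>> op.inputs.mesh.connect(my_mesh)
>>> op.inputs.mesh_scoping.connect(my_scoping)   # nodes or elements to evaluate
>>> normals_field = op.outputs.field()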
diff --git a/src/ansys/dpf/core/operators/geo/normals_provider_nl.py b/src/ansys/dpf/core/operators/geo/normals_provider_nl.py
index 3121a26cafc..267307213dd 100644
--- a/src/ansys/dpf/core/operators/geo/normals_provider_nl.py
+++ b/src/ansys/dpf/core/operators/geo/normals_provider_nl.py
@@ -4,33 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class normals_provider_nl(Operator):
- """Computes the normals on nodes/faces/elements based on integration
- points (more accurate for non-linear elements) on a skin mesh.
+ r"""Computes the normals on nodes/faces/elements based on integration points
+ (more accurate for non-linear elements) on a skin mesh.
+
Parameters
----------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Skin, face, or shell mesh region.
- mesh_scoping : Scoping, optional
- Elemental, elementalnodal, or nodal scoping.
- location derived from this.
- requested_location : str, optional
- If no scoping, specifies location. if scoping
- is elemental or elementalnodal this
- overrides scoping. default is
- elemental.
+ mesh_scoping: Scoping, optional
+ Elemental, ElementalNodal, or Nodal scoping. Location derived from this.
+ requested_location: str, optional
+ If no scoping, specifies location. If scoping is Elemental or ElementalNodal, this overrides scoping. Default is Elemental.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -77,10 +78,10 @@ def __init__(
self.inputs.requested_location.connect(requested_location)
@staticmethod
- def _spec():
- description = """Computes the normals on nodes/faces/elements based on integration
- points (more accurate for non-linear elements) on a skin
- mesh."""
+ def _spec() -> Specification:
+ description = r"""Computes the normals on nodes/faces/elements based on integration points
+(more accurate for non-linear elements) on a skin mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,23 +89,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Skin, face, or shell mesh region.""",
+ document=r"""Skin, face, or shell mesh region.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Elemental, elementalnodal, or nodal scoping.
- location derived from this.""",
+ document=r"""Elemental, ElementalNodal, or Nodal scoping. Location derived from this.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""If no scoping, specifies location. if scoping
- is elemental or elementalnodal this
- overrides scoping. default is
- elemental.""",
+ document=r"""If no scoping, specifies location. If scoping is Elemental or ElementalNodal this overrides scoping. Default is Elemental.""",
),
},
map_output_pin_spec={
@@ -112,14 +109,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -128,29 +125,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="normals_provider_nl", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNormalsProviderNl:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNormalsProviderNl
+ inputs:
+ An instance of InputsNormalsProviderNl.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNormalsProviderNl:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNormalsProviderNl
+ outputs:
+ An instance of OutputsNormalsProviderNl.
"""
return super().outputs
@@ -183,14 +187,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._requested_location)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
Skin, face, or shell mesh region.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,15 +208,15 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elemental, elementalnodal, or nodal scoping.
- location derived from this.
+ Elemental, ElementalNodal, or Nodal scoping. Location derived from this.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,17 +229,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- If no scoping, specifies location. if scoping
- is elemental or elementalnodal this
- overrides scoping. default is
- elemental.
+ If no scoping, specifies location. If scoping is Elemental or ElementalNodal, this overrides scoping. Default is Elemental.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +268,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.normals_provider_nl()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
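
A sketch using the requested_location override, assuming `my_skin_mesh` is a skin MeshedRegion and "Nodal" is an accepted location string (placeholders/assumptions):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.normals_provider_nl()
>>> op.inputs.mesh.connect(my_skin_mesh)            # skin, face, or shell mesh region
>>> op.inputs.requested_location.connect("Nodal")   # override the default Elemental location
>>> nodal_normals = op.outputs.field()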
diff --git a/src/ansys/dpf/core/operators/geo/rotate.py b/src/ansys/dpf/core/operators/geo/rotate.py
index 649c14cec4c..04ec742ac8a 100644
--- a/src/ansys/dpf/core/operators/geo/rotate.py
+++ b/src/ansys/dpf/core/operators/geo/rotate.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class rotate(Operator):
- """Applies a transformation (rotation) matrix on a field.
+ r"""Applies a transformation (rotation) matrix on a field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- field_rotation_matrix : Field
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ field_rotation_matrix: Field
3-3 rotation matrix
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,8 +65,9 @@ def __init__(
self.inputs.field_rotation_matrix.connect(field_rotation_matrix)
@staticmethod
- def _spec():
- description = """Applies a transformation (rotation) matrix on a field."""
+ def _spec() -> Specification:
+ description = r"""Applies a transformation (rotation) matrix on a field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +75,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="field_rotation_matrix",
type_names=["field"],
optional=False,
- document="""3-3 rotation matrix""",
+ document=r"""3-3 rotation matrix""",
),
},
map_output_pin_spec={
@@ -85,14 +89,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="rotate", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRotate:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRotate
+ inputs:
+ An instance of InputsRotate.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRotate:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRotate
+ outputs:
+ An instance of OutputsRotate.
"""
return super().outputs
@@ -150,15 +161,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_rotation_matrix)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,14 +182,15 @@ def field(self):
return self._field
@property
- def field_rotation_matrix(self):
- """Allows to connect field_rotation_matrix input to the operator.
+ def field_rotation_matrix(self) -> Input:
+ r"""Allows to connect field_rotation_matrix input to the operator.
3-3 rotation matrix
- Parameters
- ----------
- my_field_rotation_matrix : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -209,18 +221,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
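
A sketch of the rotation, assuming `my_field` and a 3-3 rotation-matrix Field `my_rotation` are placeholders built elsewhere:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.field_rotation_matrix.connect(my_rotation)  # 3-3 rotation matrix
>>> rotated = op.outputs.field()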
diff --git a/src/ansys/dpf/core/operators/geo/rotate_fc.py b/src/ansys/dpf/core/operators/geo/rotate_fc.py
index 6665beb0472..81544f40c78 100644
--- a/src/ansys/dpf/core/operators/geo/rotate_fc.py
+++ b/src/ansys/dpf/core/operators/geo/rotate_fc.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class rotate_fc(Operator):
- """Apply a transformation (rotation) matrix on all the fields of a fields
+ r"""Apply a transformation (rotation) matrix on all the fields of a fields
container.
+
Parameters
----------
- fields_container : FieldsContainer
- coordinate_system : Field
+ fields_container: FieldsContainer
+ coordinate_system: Field
3-3 rotation matrix
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -60,9 +65,10 @@ def __init__(
self.inputs.coordinate_system.connect(coordinate_system)
@staticmethod
- def _spec():
- description = """Apply a transformation (rotation) matrix on all the fields of a fields
- container."""
+ def _spec() -> Specification:
+ description = r"""Apply a transformation (rotation) matrix on all the fields of a fields
+container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,13 +76,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="coordinate_system",
type_names=["field"],
optional=False,
- document="""3-3 rotation matrix""",
+ document=r"""3-3 rotation matrix""",
),
},
map_output_pin_spec={
@@ -84,14 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -100,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="rotate_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRotateFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRotateFc
+ inputs:
+ An instance of InputsRotateFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRotateFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRotateFc
+ outputs:
+ An instance of OutputsRotateFc.
"""
return super().outputs
@@ -149,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._coordinate_system)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -167,14 +181,15 @@ def fields_container(self):
return self._fields_container
@property
- def coordinate_system(self):
- """Allows to connect coordinate_system input to the operator.
+ def coordinate_system(self) -> Input:
+ r"""Allows to connect coordinate_system input to the operator.
3-3 rotation matrix
- Parameters
- ----------
- my_coordinate_system : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,18 +220,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
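
The fields-container variant follows the same pattern, here with placeholders `my_fields` and `my_rotation`:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate_fc()
>>> op.inputs.fields_container.connect(my_fields)
>>> op.inputs.coordinate_system.connect(my_rotation)   # 3-3 rotation matrix
>>> rotated_fc = op.outputs.fields_container()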
diff --git a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py
index 470c7040211..1bb327e5aad 100644
--- a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py
+++ b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs.py
@@ -4,35 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class rotate_in_cylindrical_cs(Operator):
- """Rotates a field to its corresponding values into the specified
- cylindrical coordinate system (corresponding to the field
- position). If a coordinate system is not set in the
- coordinate_system pin, the field is rotated on each node following
- the local polar coordinate system.
+ r"""Rotates a field to its corresponding values into the specified
+ cylindrical coordinate system (corresponding to the field position). If
+ a coordinate system is not set in the coordinate_system pin, the field
+ is rotated on each node following the local polar coordinate system.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- coordinate_system : Field, optional
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.
- mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ coordinate_system: Field, optional
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.
+ mesh: MeshedRegion, optional
Mesh support of the input field.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -74,12 +75,12 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Rotates a field to its corresponding values into the specified
- cylindrical coordinate system (corresponding to the field
- position). If a coordinate system is not set in the
- coordinate_system pin, the field is rotated on each node
- following the local polar coordinate system."""
+ def _spec() -> Specification:
+ description = r"""Rotates a field to its corresponding values into the specified
+cylindrical coordinate system (corresponding to the field position). If
+a coordinate system is not set in the coordinate_system pin, the field
+is rotated on each node following the local polar coordinate system.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -87,22 +88,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="coordinate_system",
type_names=["field"],
optional=True,
- document="""3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.""",
+ document=r"""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.""",
),
2: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh support of the input field.""",
+ document=r"""Mesh support of the input field.""",
),
},
map_output_pin_spec={
@@ -110,14 +108,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="transform_cylindricalCS", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRotateInCylindricalCs:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRotateInCylindricalCs
+ inputs:
+ An instance of InputsRotateInCylindricalCs.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRotateInCylindricalCs:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRotateInCylindricalCs
+ outputs:
+ An instance of OutputsRotateInCylindricalCs.
"""
return super().outputs
@@ -181,15 +186,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,16 +207,15 @@ def field(self):
return self._field
@property
- def coordinate_system(self):
- """Allows to connect coordinate_system input to the operator.
+ def coordinate_system(self) -> Input:
+ r"""Allows to connect coordinate_system input to the operator.
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.
- Parameters
- ----------
- my_coordinate_system : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,14 +228,15 @@ def coordinate_system(self):
return self._coordinate_system
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
Mesh support of the input field.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate_in_cylindrical_cs()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
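
A sketch of the cylindrical rotation; leaving the coordinate_system pin unset rotates each node in its local polar system (`my_field` and `my_mesh` are placeholders):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate_in_cylindrical_cs()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.mesh.connect(my_mesh)   # mesh support of the input field
>>> rotated = op.outputs.field()      # values rotated node by node into the local polar system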
diff --git a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py
index f695e3b895d..513ceec4ffc 100644
--- a/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py
+++ b/src/ansys/dpf/core/operators/geo/rotate_in_cylindrical_cs_fc.py
@@ -4,35 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class rotate_in_cylindrical_cs_fc(Operator):
- """Rotates all the fields of a fields container (not defined with a
- cynlindrical coordinate system) to its corresponding values into
- the specified cylindrical coordinate system (corresponding to the
- field position). If a coordinate system is not set in the
- coordinate_system pin, the field is rotated on each node following
- the local polar coordinate system.
+ r"""Rotates all the fields of a fields container (not defined with a
+ cylindrical coordinate system) to its corresponding values into the
+ specified cylindrical coordinate system (corresponding to the field
+ position). If a coordinate system is not set in the coordinate_system
+ pin, the field is rotated on each node following the local polar
+ coordinate system.
+
Parameters
----------
- field : Field or FieldsContainer
- coordinate_system : Field, optional
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.
- mesh : MeshedRegion, optional
- Mesh support of the input fields_container,
- in case it does not have one defined.
+ field: Field or FieldsContainer
+ coordinate_system: Field, optional
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.
+ mesh: MeshedRegion, optional
+ Mesh support of the input fields_container, in case it does not have one defined.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -76,14 +78,14 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Rotates all the fields of a fields container (not defined with a
- cynlindrical coordinate system) to its corresponding
- values into the specified cylindrical coordinate system
- (corresponding to the field position). If a coordinate
- system is not set in the coordinate_system pin, the field
- is rotated on each node following the local polar
- coordinate system."""
+ def _spec() -> Specification:
+ description = r"""Rotates all the fields of a fields container (not defined with a
+cylindrical coordinate system) to its corresponding values into the
+specified cylindrical coordinate system (corresponding to the field
+position). If a coordinate system is not set in the coordinate_system
+pin, the field is rotated on each node following the local polar
+coordinate system.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -91,22 +93,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="coordinate_system",
type_names=["field"],
optional=True,
- document="""3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.""",
+ document=r"""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.""",
),
2: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Mesh support of the input fields_container,
- in case it does not have one defined.""",
+ document=r"""Mesh support of the input fields_container, in case it does not have one defined.""",
),
},
map_output_pin_spec={
@@ -114,14 +113,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -130,31 +129,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="transform_cylindrical_cs_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsRotateInCylindricalCsFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRotateInCylindricalCsFc
+ inputs:
+ An instance of InputsRotateInCylindricalCsFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRotateInCylindricalCsFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRotateInCylindricalCsFc
+ outputs:
+ An instance of OutputsRotateInCylindricalCsFc.
"""
return super().outputs
@@ -187,12 +193,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,16 +212,15 @@ def field(self):
return self._field
@property
- def coordinate_system(self):
- """Allows to connect coordinate_system input to the operator.
+ def coordinate_system(self) -> Input:
+ r"""Allows to connect coordinate_system input to the operator.
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system.
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system.
- Parameters
- ----------
- my_coordinate_system : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,15 +233,15 @@ def coordinate_system(self):
return self._coordinate_system
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh support of the input fields_container,
- in case it does not have one defined.
+ Mesh support of the input fields_container, in case it does not have one defined.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -268,18 +274,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.rotate_in_cylindrical_cs_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
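A minimal usage sketch of the rotate_in_cylindrical_cs_fc operator documented above, based only on the pins shown in this file; my_fields_container and my_cs_field are placeholders for data obtained elsewhere (for example from a result operator) and are not part of this change:

from ansys.dpf import core as dpf

# Hedged sketch: rotate the fields of an existing fields container into a
# user-defined cylindrical coordinate system. my_fields_container and
# my_cs_field are assumed to exist already (placeholders).
op = dpf.operators.geo.rotate_in_cylindrical_cs_fc()
op.inputs.field.connect(my_fields_container)      # pin 0: field or fields container
op.inputs.coordinate_system.connect(my_cs_field)  # pin 1, optional: 3-3 rotation matrix and origin
rotated = op.outputs.fields_container()           # evaluates the operator

Omitting pin 1 rotates each node following its local polar coordinate system, as the description above states.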
diff --git a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py
index 206e7d0efe9..89dc448ca45 100644
--- a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py
+++ b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class spherical_to_cartesian(Operator):
- """Converts 3D field from spherical coordinates to cartesian coordinates.
+ r"""Converts 3D field from spherical coordinates to cartesian coordinates.
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -50,10 +55,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = (
- """Converts 3D field from spherical coordinates to cartesian coordinates."""
- )
+ def _spec() -> Specification:
+ description = r"""Converts 3D field from spherical coordinates to cartesian coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +65,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +73,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="spherical_to_cartesian", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSphericalToCartesian:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSphericalToCartesian
+ inputs:
+ An instance of InputsSphericalToCartesian.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSphericalToCartesian:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSphericalToCartesian
+ outputs:
+ An instance of OutputsSphericalToCartesian.
"""
return super().outputs
@@ -130,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.spherical_to_cartesian()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py
index c402d1e3f5e..7fe207918bb 100644
--- a/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py
+++ b/src/ansys/dpf/core/operators/geo/spherical_to_cartesian_fc.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class spherical_to_cartesian_fc(Operator):
- """Converts 3D field from spherical coordinates to cartesian coordinates.
+ r"""Converts 3D field from spherical coordinates to cartesian coordinates.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,10 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = (
- """Converts 3D field from spherical coordinates to cartesian coordinates."""
- )
+ def _spec() -> Specification:
+ description = r"""Converts 3D field from spherical coordinates to cartesian coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="spherical_to_cartesian_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSphericalToCartesianFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSphericalToCartesianFc
+ inputs:
+ An instance of InputsSphericalToCartesianFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSphericalToCartesianFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSphericalToCartesianFc
+ outputs:
+ An instance of OutputsSphericalToCartesianFc.
"""
return super().outputs
@@ -132,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +182,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.spherical_to_cartesian_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
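spherical_to_cartesian and spherical_to_cartesian_fc above differ only in whether they take a single Field or a whole FieldsContainer. A hedged sketch of the fields-container variant, where my_spherical_fc is a placeholder for 3D coordinates expressed in spherical form:

from ansys.dpf import core as dpf

# my_spherical_fc is assumed to already hold the spherical-coordinate data
# (placeholder, not defined by this patch).
op = dpf.operators.geo.spherical_to_cartesian_fc()
op.inputs.fields_container.connect(my_spherical_fc)  # pin 0
cartesian_fc = op.outputs.fields_container()         # converted result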
diff --git a/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py b/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py
index fa7bf672f88..5ba3b820d68 100644
--- a/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py
+++ b/src/ansys/dpf/core/operators/geo/to_polar_coordinates.py
@@ -4,33 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class to_polar_coordinates(Operator):
- """Finds r, theta (rad), and z coordinates of a coordinates (nodal) field
- in a cartesian coordinates system where the input coordinate
- system defines the rotation axis and the origin.
+ r"""Finds r, theta (rad), and z coordinates of a coordinates (nodal) field
+ in a cartesian coordinate system where the input coordinate system
+ defines the rotation axis and the origin.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- coordinate_system : Field, optional
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system. by default, the
- rotation axis is the z axis and the
- origin is [0,0,0].
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ coordinate_system: Field, optional
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. By default, the rotation axis is the z axis and the origin is [0,0,0].
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -65,11 +65,11 @@ def __init__(self, field=None, coordinate_system=None, config=None, server=None)
self.inputs.coordinate_system.connect(coordinate_system)
@staticmethod
- def _spec():
- description = """Finds r, theta (rad), and z coordinates of a coordinates (nodal) field
- in a cartesian coordinates system where the input
- coordinate system defines the rotation axis and the
- origin."""
+ def _spec() -> Specification:
+ description = r"""Finds r, theta (rad), and z coordinates of a coordinates (nodal) field
+in a cartesian coordinate system where the input coordinate system
+defines the rotation axis and the origin.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -77,18 +77,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="coordinate_system",
type_names=["field"],
optional=True,
- document="""3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system. by default, the
- rotation axis is the z axis and the
- origin is [0,0,0].""",
+ document=r"""3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. By default, the rotation axis is the z axis and the origin is [0,0,0].""",
),
},
map_output_pin_spec={
@@ -96,14 +91,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +107,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="polar_coordinates", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsToPolarCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsToPolarCoordinates
+ inputs:
+ An instance of InputsToPolarCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsToPolarCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsToPolarCoordinates
+ outputs:
+ An instance of OutputsToPolarCoordinates.
"""
return super().outputs
@@ -163,15 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._coordinate_system)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,18 +186,15 @@ def field(self):
return self._field
@property
- def coordinate_system(self):
- """Allows to connect coordinate_system input to the operator.
+ def coordinate_system(self) -> Input:
+ r"""Allows to connect coordinate_system input to the operator.
- 3-3 rotation matrix and origin coordinates
- must be set here to define a
- coordinate system. by default, the
- rotation axis is the z axis and the
- origin is [0,0,0].
+ 3-3 rotation matrix and origin coordinates must be set here to define a coordinate system. By default, the rotation axis is the z axis and the origin is [0,0,0].
- Parameters
- ----------
- my_coordinate_system : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,18 +225,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.geo.to_polar_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
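to_polar_coordinates above returns r, theta (rad), and z for a nodal coordinates field; by default the rotation axis is the z axis with origin [0,0,0], and pin 1 can override that with a 3-3 rotation matrix plus origin. A sketch under the assumption that my_coordinates_field and my_cs_field already exist (placeholders):

from ansys.dpf import core as dpf

op = dpf.operators.geo.to_polar_coordinates()
op.inputs.field.connect(my_coordinates_field)     # pin 0: coordinates (nodal) field
op.inputs.coordinate_system.connect(my_cs_field)  # pin 1, optional: rotation axis and origin
polar = op.outputs.field()                        # r, theta (rad), z per node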
diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py b/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py
index 246c7cdbbfc..3d91dff3a1d 100644
--- a/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py
+++ b/src/ansys/dpf/core/operators/invariant/convertnum_bcs_to_nod.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class convertnum_bcs_to_nod(Operator):
- """Converts a fields container from BCS to NOD ordering.
+ r"""Converts a fields container from BCS to NOD ordering.
+
Parameters
----------
- fields_container : FieldsContainer
- Fields_container
- data_sources : DataSources
+ fields_container: FieldsContainer
+ fields_container
+ data_sources: DataSources
Data_sources (must contain the full file).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -60,8 +65,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Converts a fields container from BCS to NOD ordering."""
+ def _spec() -> Specification:
+ description = r"""Converts a fields container from BCS to NOD ordering.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain the full file).""",
+ document=r"""Data_sources (must contain the full file).""",
),
},
map_output_pin_spec={
@@ -83,14 +89,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="convertnum_bcs_to_nod", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsConvertnumBcsToNod:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsConvertnumBcsToNod
+ inputs:
+ An instance of InputsConvertnumBcsToNod.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsConvertnumBcsToNod:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsConvertnumBcsToNod
+ outputs:
+ An instance of OutputsConvertnumBcsToNod.
"""
return super().outputs
@@ -152,14 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,14 +186,15 @@ def fields_container(self):
return self._fields_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
Data_sources (must contain the full file).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.convertnum_bcs_to_nod()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py b/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py
index 177ef06c7cd..6ca4c7ec6ca 100644
--- a/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py
+++ b/src/ansys/dpf/core/operators/invariant/convertnum_nod_to_bcs.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class convertnum_nod_to_bcs(Operator):
- """Converts a fields container from NOD to BCS ordering.
+ r"""Converts a fields container from NOD to BCS ordering.
+
Parameters
----------
- fields_container : FieldsContainer
- Fields_container
- data_sources : DataSources
+ fields_container: FieldsContainer
+ fields_container
+ data_sources: DataSources
Data_sources (must contain the full file).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -60,8 +65,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Converts a fields container from NOD to BCS ordering."""
+ def _spec() -> Specification:
+ description = r"""Converts a fields container from NOD to BCS ordering.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain the full file).""",
+ document=r"""Data_sources (must contain the full file).""",
),
},
map_output_pin_spec={
@@ -83,14 +89,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="convertnum_nod_to_bcs", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsConvertnumNodToBcs:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsConvertnumNodToBcs
+ inputs:
+ An instance of InputsConvertnumNodToBcs.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsConvertnumNodToBcs:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsConvertnumNodToBcs
+ outputs:
+ An instance of OutputsConvertnumNodToBcs.
"""
return super().outputs
@@ -152,14 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,14 +186,15 @@ def fields_container(self):
return self._fields_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
Data_sources (must contain the full file).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.convertnum_nod_to_bcs()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/invariant/convertnum_op.py b/src/ansys/dpf/core/operators/invariant/convertnum_op.py
index c2589abbec2..3860084a52d 100644
--- a/src/ansys/dpf/core/operators/invariant/convertnum_op.py
+++ b/src/ansys/dpf/core/operators/invariant/convertnum_op.py
@@ -4,31 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class convertnum_op(Operator):
- """Converts a fields container from one mapdl ordering to another mapdl
+ r"""Converts a fields container from one mapdl ordering to another mapdl
ordering. Supported mapdl ordering are BCS=0, FUL=1, NOD=2.
+
Parameters
----------
- input_ordering : int
+ input_ordering: int
Input ordering number
- output_ordering : int
+ output_ordering: int
Output ordering number
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Expect fields container
- data_sources : DataSources
+ data_sources: DataSources
Data_sources (must contain the full file).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -81,10 +86,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Converts a fields container from one mapdl ordering to another mapdl
- ordering. Supported mapdl ordering are BCS=0, FUL=1,
- NOD=2."""
+ def _spec() -> Specification:
+ description = r"""Converts a fields container from one mapdl ordering to another mapdl
+ordering. Supported mapdl ordering are BCS=0, FUL=1, NOD=2.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -92,25 +97,25 @@ def _spec():
name="input_ordering",
type_names=["int32"],
optional=False,
- document="""Input ordering number""",
+ document=r"""Input ordering number""",
),
1: PinSpecification(
name="output_ordering",
type_names=["int32"],
optional=False,
- document="""Output ordering number""",
+ document=r"""Output ordering number""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Expect fields container""",
+ document=r"""Expect fields container""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain the full file).""",
+ document=r"""Data_sources (must contain the full file).""",
),
},
map_output_pin_spec={
@@ -118,14 +123,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -134,29 +139,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="convertnum_op", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsConvertnumOp:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsConvertnumOp
+ inputs:
+ An instance of InputsConvertnumOp.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsConvertnumOp:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsConvertnumOp
+ outputs:
+ An instance of OutputsConvertnumOp.
"""
return super().outputs
@@ -191,14 +203,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def input_ordering(self):
- """Allows to connect input_ordering input to the operator.
+ def input_ordering(self) -> Input:
+ r"""Allows to connect input_ordering input to the operator.
Input ordering number
- Parameters
- ----------
- my_input_ordering : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +224,15 @@ def input_ordering(self):
return self._input_ordering
@property
- def output_ordering(self):
- """Allows to connect output_ordering input to the operator.
+ def output_ordering(self) -> Input:
+ r"""Allows to connect output_ordering input to the operator.
Output ordering number
- Parameters
- ----------
- my_output_ordering : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,14 +245,15 @@ def output_ordering(self):
return self._output_ordering
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
Expect fields container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,14 +266,15 @@ def fields_container(self):
return self._fields_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
Data_sources (must contain the full file).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -289,18 +305,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.convertnum_op()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
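The three convertnum operators above share one mechanism; convertnum_op is the general form, with MAPDL orderings encoded as BCS=0, FUL=1, NOD=2 and a data_sources pin that must contain the full file. A hedged sketch; the file path and my_fields_container are placeholders:

from ansys.dpf import core as dpf

data_sources = dpf.DataSources(r"path/to/model.full")  # placeholder path; must contain the full file
op = dpf.operators.invariant.convertnum_op()
op.inputs.input_ordering.connect(2)   # NOD
op.inputs.output_ordering.connect(0)  # BCS
op.inputs.fields_container.connect(my_fields_container)
op.inputs.data_sources.connect(data_sources)
reordered = op.outputs.fields_container()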
diff --git a/src/ansys/dpf/core/operators/invariant/eigen_values.py b/src/ansys/dpf/core/operators/invariant/eigen_values.py
index 18f8ac092be..234d0ff3afe 100644
--- a/src/ansys/dpf/core/operators/invariant/eigen_values.py
+++ b/src/ansys/dpf/core/operators/invariant/eigen_values.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class eigen_values(Operator):
- """Computes the element-wise Eigen values of a tensor field.
+ r"""Computes the element-wise Eigen values of a tensor field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes the element-wise Eigen values of a tensor field."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen values of a tensor field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eig_values", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEigenValues:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEigenValues
+ inputs:
+ An instance of InputsEigenValues.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEigenValues:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEigenValues
+ outputs:
+ An instance of OutputsEigenValues.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.eigen_values()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py b/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py
index af01aa92be7..2afcc6146d0 100644
--- a/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/eigen_values_fc.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class eigen_values_fc(Operator):
- """Computes the element-wise Eigen values of all the tensor fields of a
+ r"""Computes the element-wise Eigen values of all the tensor fields of a
fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the element-wise Eigen values of all the tensor fields of a
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen values of all the tensor fields of a
+fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eig_values_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEigenValuesFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEigenValuesFc
+ inputs:
+ An instance of InputsEigenValuesFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEigenValuesFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEigenValuesFc
+ outputs:
+ An instance of OutputsEigenValuesFc.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.eigen_values_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/invariant/eigen_vectors.py b/src/ansys/dpf/core/operators/invariant/eigen_vectors.py
index 838214d8b4a..a6c7d3d179b 100644
--- a/src/ansys/dpf/core/operators/invariant/eigen_vectors.py
+++ b/src/ansys/dpf/core/operators/invariant/eigen_vectors.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class eigen_vectors(Operator):
- """Computes the element-wise Eigen vectors for each tensor in the field.
+ r"""Computes the element-wise Eigen vectors for each tensor in the field.
+
Parameters
----------
- field : FieldsContainer or Field
- Field or fields container with only one field
- is expected
+ field: FieldsContainer or Field
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,10 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = (
- """Computes the element-wise Eigen vectors for each tensor in the field."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen vectors for each tensor in the field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,8 +66,7 @@ def _spec():
name="field",
type_names=["fields_container", "field"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -72,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eig_vectors", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEigenVectors:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEigenVectors
+ inputs:
+ An instance of InputsEigenVectors.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEigenVectors:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEigenVectors
+ outputs:
+ An instance of OutputsEigenVectors.
"""
return super().outputs
@@ -133,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.eigen_vectors()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py b/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py
index 41abc45e49b..979e048981c 100644
--- a/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/eigen_vectors_fc.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class eigen_vectors_fc(Operator):
- """Computes the element-wise Eigen vectors for each tensor in the fields
- of the field container.
+ r"""Computes the element-wise Eigen vectors for each tensor in the fields of
+ the field container.
+
Parameters
----------
- fields_container : FieldsContainer or Field
+ fields_container: FieldsContainer or Field
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the element-wise Eigen vectors for each tensor in the fields
- of the field container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen vectors for each tensor in the fields of
+the field container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eig_vectors_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEigenVectorsFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEigenVectorsFc
+ inputs:
+ An instance of InputsEigenVectorsFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEigenVectorsFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEigenVectorsFc
+ outputs:
+ An instance of OutputsEigenVectorsFc.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.eigen_vectors_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
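eigen_values and eigen_vectors (and their _fc variants above) compute the element-wise eigenvalues and eigenvectors of a tensor field. A minimal sketch, assuming my_stress_field is a tensor Field obtained elsewhere (placeholder):

from ansys.dpf import core as dpf

vals_op = dpf.operators.invariant.eigen_values()
vals_op.inputs.field.connect(my_stress_field)  # field or fields container with only one field
principal_values = vals_op.outputs.field()

vecs_op = dpf.operators.invariant.eigen_vectors()
vecs_op.inputs.field.connect(my_stress_field)
principal_directions = vecs_op.outputs.field()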
diff --git a/src/ansys/dpf/core/operators/invariant/invariants.py b/src/ansys/dpf/core/operators/invariant/invariants.py
index a94ccb19eaf..8ffdd49d4f3 100644
--- a/src/ansys/dpf/core/operators/invariant/invariants.py
+++ b/src/ansys/dpf/core/operators/invariant/invariants.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class invariants(Operator):
- """Computes the element-wise invariants of a tensor field.
+ r"""Computes the element-wise invariants of a tensor field.
+
Parameters
----------
- field : Field
+ field: Field
Returns
-------
- field_int : Field
- Stress intensity field
- field_eqv : Field
- Stress equivalent intensity
- field_max_shear : Field
- Max shear stress field
+ field_int: Field
+ stress intensity field
+ field_eqv: Field
+ stress equivalent intensity
+ field_max_shear: Field
+ max shear stress field
Examples
--------
@@ -57,8 +62,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes the element-wise invariants of a tensor field."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise invariants of a tensor field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -66,7 +72,7 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -74,26 +80,26 @@ def _spec():
name="field_int",
type_names=["field"],
optional=False,
- document="""Stress intensity field""",
+ document=r"""stress intensity field""",
),
1: PinSpecification(
name="field_eqv",
type_names=["field"],
optional=False,
- document="""Stress equivalent intensity""",
+ document=r"""stress equivalent intensity""",
),
2: PinSpecification(
name="field_max_shear",
type_names=["field"],
optional=False,
- document="""Max shear stress field""",
+ document=r"""max shear stress field""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +108,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invariants_deriv", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsInvariants:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsInvariants
+ inputs:
+ An instance of InputsInvariants.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsInvariants:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsInvariants
+ outputs:
+ An instance of OutputsInvariants.
"""
return super().outputs
@@ -147,12 +160,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,52 +203,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_max_shear)
@property
- def field_int(self):
- """Allows to get field_int output of the operator
+ def field_int(self) -> Output:
+ r"""Allows to get field_int output of the operator
+
+ stress intensity field
Returns
- ----------
- my_field_int : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_int = op.outputs.field_int()
- """ # noqa: E501
+ """
return self._field_int
@property
- def field_eqv(self):
- """Allows to get field_eqv output of the operator
+ def field_eqv(self) -> Output:
+ r"""Allows to get field_eqv output of the operator
+
+ stress equivalent intensity
Returns
- ----------
- my_field_eqv : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_eqv = op.outputs.field_eqv()
- """ # noqa: E501
+ """
return self._field_eqv
@property
- def field_max_shear(self):
- """Allows to get field_max_shear output of the operator
+ def field_max_shear(self) -> Output:
+ r"""Allows to get field_max_shear output of the operator
+
+ max shear stress field
Returns
- ----------
- my_field_max_shear : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max_shear = op.outputs.field_max_shear()
- """ # noqa: E501
+ """
return self._field_max_shear
diff --git a/src/ansys/dpf/core/operators/invariant/invariants_fc.py b/src/ansys/dpf/core/operators/invariant/invariants_fc.py
index e47278cad13..288b31699d8 100644
--- a/src/ansys/dpf/core/operators/invariant/invariants_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/invariants_fc.py
@@ -4,29 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class invariants_fc(Operator):
- """Computes the element-wise invariants of all the tensor fields of a
+ r"""Computes the element-wise invariants of all the tensor fields of a
fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_int : FieldsContainer
- Stress intensity field
- fields_eqv : FieldsContainer
- Stress equivalent intensity
- fields_max_shear : FieldsContainer
- Max shear stress field
+ fields_int: FieldsContainer
+ stress intensity field
+ fields_eqv: FieldsContainer
+ stress equivalent intensity
+ fields_max_shear: FieldsContainer
+ max shear stress field
Examples
--------
@@ -58,9 +63,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the element-wise invariants of all the tensor fields of a
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise invariants of all the tensor fields of a
+fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -68,7 +74,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -76,26 +82,26 @@ def _spec():
name="fields_int",
type_names=["fields_container"],
optional=False,
- document="""Stress intensity field""",
+ document=r"""stress intensity field""",
),
1: PinSpecification(
name="fields_eqv",
type_names=["fields_container"],
optional=False,
- document="""Stress equivalent intensity""",
+ document=r"""stress equivalent intensity""",
),
2: PinSpecification(
name="fields_max_shear",
type_names=["fields_container"],
optional=False,
- document="""Max shear stress field""",
+ document=r"""max shear stress field""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -104,29 +110,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invariants_deriv_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsInvariantsFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsInvariantsFc
+ inputs:
+ An instance of InputsInvariantsFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsInvariantsFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsInvariantsFc
+ outputs:
+ An instance of OutputsInvariantsFc.
"""
return super().outputs
@@ -149,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -191,52 +205,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_max_shear)
@property
- def fields_int(self):
- """Allows to get fields_int output of the operator
+ def fields_int(self) -> Output:
+ r"""Allows to get fields_int output of the operator
+
+ stress intensity field
Returns
- ----------
- my_fields_int : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_int = op.outputs.fields_int()
- """ # noqa: E501
+ """
return self._fields_int
@property
- def fields_eqv(self):
- """Allows to get fields_eqv output of the operator
+ def fields_eqv(self) -> Output:
+ r"""Allows to get fields_eqv output of the operator
+
+ stress equivalent intensity
Returns
- ----------
- my_fields_eqv : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_eqv = op.outputs.fields_eqv()
- """ # noqa: E501
+ """
return self._fields_eqv
@property
- def fields_max_shear(self):
- """Allows to get fields_max_shear output of the operator
+ def fields_max_shear(self) -> Output:
+ r"""Allows to get fields_max_shear output of the operator
+
+ max shear stress field
Returns
- ----------
- my_fields_max_shear : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_max_shear = op.outputs.fields_max_shear()
- """ # noqa: E501
+ """
return self._fields_max_shear
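A minimal usage sketch for the invariants / invariants_fc pair above, following the constructor signatures and pin names shown in this diff; the input field and container are placeholders, not part of the changeset:

from ansys.dpf import core as dpf

# Placeholder inputs; in practice connect real tensor fields (e.g. stresses).
my_stress_field = dpf.Field()
my_stress_fc = dpf.FieldsContainer()

# Field variant: three output pins.
op = dpf.operators.invariant.invariants(field=my_stress_field)
stress_intensity = op.outputs.field_int()
stress_equivalent = op.outputs.field_eqv()
max_shear = op.outputs.field_max_shear()

# FieldsContainer variant: same pin layout, containers in and out.
op_fc = dpf.operators.invariant.invariants_fc(fields_container=my_stress_fc)
intensities = op_fc.outputs.fields_int()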
diff --git a/src/ansys/dpf/core/operators/invariant/principal_invariants.py b/src/ansys/dpf/core/operators/invariant/principal_invariants.py
index 595f4f57f4e..bc497ef44ae 100644
--- a/src/ansys/dpf/core/operators/invariant/principal_invariants.py
+++ b/src/ansys/dpf/core/operators/invariant/principal_invariants.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class principal_invariants(Operator):
- """Computes the element-wise Eigen values of a tensor field.
+ r"""Computes the element-wise Eigen values of a tensor field.
+
Parameters
----------
- field : Field
+ field: Field
Returns
-------
- field_eig_1 : Field
- First eigen value field
- field_eig_2 : Field
- Second eigen value field
- field_eig_3 : Field
- Third eigen value field
+ field_eig_1: Field
+ first eigen value field
+ field_eig_2: Field
+ second eigen value field
+ field_eig_3: Field
+ third eigen value field
Examples
--------
@@ -57,8 +62,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes the element-wise Eigen values of a tensor field."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen values of a tensor field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -66,7 +72,7 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -74,26 +80,26 @@ def _spec():
name="field_eig_1",
type_names=["field"],
optional=False,
- document="""First eigen value field""",
+ document=r"""first eigen value field""",
),
1: PinSpecification(
name="field_eig_2",
type_names=["field"],
optional=False,
- document="""Second eigen value field""",
+ document=r"""second eigen value field""",
),
2: PinSpecification(
name="field_eig_3",
type_names=["field"],
optional=False,
- document="""Third eigen value field""",
+ document=r"""third eigen value field""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +108,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invariants", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPrincipalInvariants:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPrincipalInvariants
+ inputs:
+ An instance of InputsPrincipalInvariants.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPrincipalInvariants:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPrincipalInvariants
+ outputs:
+ An instance of OutputsPrincipalInvariants.
"""
return super().outputs
@@ -147,12 +160,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,52 +203,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_eig_3)
@property
- def field_eig_1(self):
- """Allows to get field_eig_1 output of the operator
+ def field_eig_1(self) -> Output:
+ r"""Allows to get field_eig_1 output of the operator
+
+ first eigen value field
Returns
- ----------
- my_field_eig_1 : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_eig_1 = op.outputs.field_eig_1()
- """ # noqa: E501
+ """
return self._field_eig_1
@property
- def field_eig_2(self):
- """Allows to get field_eig_2 output of the operator
+ def field_eig_2(self) -> Output:
+ r"""Allows to get field_eig_2 output of the operator
+
+ second eigen value field
Returns
- ----------
- my_field_eig_2 : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_eig_2 = op.outputs.field_eig_2()
- """ # noqa: E501
+ """
return self._field_eig_2
@property
- def field_eig_3(self):
- """Allows to get field_eig_3 output of the operator
+ def field_eig_3(self) -> Output:
+ r"""Allows to get field_eig_3 output of the operator
+
+ third eigen value field
Returns
- ----------
- my_field_eig_3 : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_eig_3 = op.outputs.field_eig_3()
- """ # noqa: E501
+ """
return self._field_eig_3
diff --git a/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py b/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py
index dd6fda37c28..090b8daa49d 100644
--- a/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/principal_invariants_fc.py
@@ -4,29 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class principal_invariants_fc(Operator):
- """Computes the element-wise Eigen values of all the tensor fields of a
+ r"""Computes the element-wise Eigen values of all the tensor fields of a
fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_eig_1 : FieldsContainer
- First eigen value fields
- fields_eig_2 : FieldsContainer
- Second eigen value fields
- fields_eig_3 : FieldsContainer
- Third eigen value fields
+ fields_eig_1: FieldsContainer
+ first eigen value fields
+ fields_eig_2: FieldsContainer
+ second eigen value fields
+ fields_eig_3: FieldsContainer
+ third eigen value fields
Examples
--------
@@ -58,9 +63,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the element-wise Eigen values of all the tensor fields of a
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Eigen values of all the tensor fields of a
+fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -68,7 +74,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -76,26 +82,26 @@ def _spec():
name="fields_eig_1",
type_names=["fields_container"],
optional=False,
- document="""First eigen value fields""",
+ document=r"""first eigen value fields""",
),
1: PinSpecification(
name="fields_eig_2",
type_names=["fields_container"],
optional=False,
- document="""Second eigen value fields""",
+ document=r"""second eigen value fields""",
),
2: PinSpecification(
name="fields_eig_3",
type_names=["fields_container"],
optional=False,
- document="""Third eigen value fields""",
+ document=r"""third eigen value fields""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -104,29 +110,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invariants_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPrincipalInvariantsFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPrincipalInvariantsFc
+ inputs:
+ An instance of InputsPrincipalInvariantsFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPrincipalInvariantsFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPrincipalInvariantsFc
+ outputs:
+ An instance of OutputsPrincipalInvariantsFc.
"""
return super().outputs
@@ -151,12 +164,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -199,52 +213,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_eig_3)
@property
- def fields_eig_1(self):
- """Allows to get fields_eig_1 output of the operator
+ def fields_eig_1(self) -> Output:
+ r"""Allows to get fields_eig_1 output of the operator
+
+ first eigen value fields
Returns
- ----------
- my_fields_eig_1 : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_eig_1 = op.outputs.fields_eig_1()
- """ # noqa: E501
+ """
return self._fields_eig_1
@property
- def fields_eig_2(self):
- """Allows to get fields_eig_2 output of the operator
+ def fields_eig_2(self) -> Output:
+ r"""Allows to get fields_eig_2 output of the operator
+
+ second eigen value fields
Returns
- ----------
- my_fields_eig_2 : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_eig_2 = op.outputs.fields_eig_2()
- """ # noqa: E501
+ """
return self._fields_eig_2
@property
- def fields_eig_3(self):
- """Allows to get fields_eig_3 output of the operator
+ def fields_eig_3(self) -> Output:
+ r"""Allows to get fields_eig_3 output of the operator
+
+ third eigen value fields
Returns
- ----------
- my_fields_eig_3 : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.principal_invariants_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_eig_3 = op.outputs.fields_eig_3()
- """ # noqa: E501
+ """
return self._fields_eig_3
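A similar sketch for principal_invariants / principal_invariants_fc, again with placeholder inputs:

from ansys.dpf import core as dpf

my_stress_field = dpf.Field()         # placeholder tensor field
my_stress_fc = dpf.FieldsContainer()  # placeholder container of tensor fields

# Element-wise eigenvalues of a single tensor field.
op = dpf.operators.invariant.principal_invariants(field=my_stress_field)
eig_1 = op.outputs.field_eig_1()
eig_2 = op.outputs.field_eig_2()
eig_3 = op.outputs.field_eig_3()

# Same computation for every tensor field of a container.
op_fc = dpf.operators.invariant.principal_invariants_fc(fields_container=my_stress_fc)
eig_1_fields = op_fc.outputs.fields_eig_1()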
diff --git a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py
index 438dfabf79f..fbb54dc790d 100644
--- a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py
+++ b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv.py
@@ -4,26 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class segalman_von_mises_eqv(Operator):
- """Computes the element-wise Segalman Von-Mises criteria on a tensor
- field.
+ r"""Computes the element-wise Segalman Von-Mises criteria on a tensor field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -53,9 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes the element-wise Segalman Von-Mises criteria on a tensor
- field."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Segalman Von-Mises criteria on a tensor field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,8 +66,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -72,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="segalmaneqv", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSegalmanVonMisesEqv:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSegalmanVonMisesEqv
+ inputs:
+ An instance of InputsSegalmanVonMisesEqv.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSegalmanVonMisesEqv:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSegalmanVonMisesEqv
+ outputs:
+ An instance of OutputsSegalmanVonMisesEqv.
"""
return super().outputs
@@ -133,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.segalman_von_mises_eqv()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py
index 081719a6877..74d04206a88 100644
--- a/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/segalman_von_mises_eqv_fc.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class segalman_von_mises_eqv_fc(Operator):
- """Computes the element-wise Segalman Von-Mises criteria on all the
- tensor fields of a fields container.
+ r"""Computes the element-wise Segalman Von-Mises criteria on all the tensor
+ fields of a fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the element-wise Segalman Von-Mises criteria on all the
- tensor fields of a fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Segalman Von-Mises criteria on all the tensor
+fields of a fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="segalmaneqv_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSegalmanVonMisesEqvFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSegalmanVonMisesEqvFc
+ inputs:
+ An instance of InputsSegalmanVonMisesEqvFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSegalmanVonMisesEqvFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSegalmanVonMisesEqvFc
+ outputs:
+ An instance of OutputsSegalmanVonMisesEqvFc.
"""
return super().outputs
@@ -132,12 +145,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +184,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.segalman_von_mises_eqv_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
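A sketch for segalman_von_mises_eqv / segalman_von_mises_eqv_fc; pin 0 of the field variant accepts either a Field or a FieldsContainer holding a single field, so the connect-style call from the docstrings is used here. Inputs are placeholders:

from ansys.dpf import core as dpf

my_stress_field = dpf.Field()         # placeholder tensor field
my_stress_fc = dpf.FieldsContainer()  # placeholder container

op = dpf.operators.invariant.segalman_von_mises_eqv()
op.inputs.field.connect(my_stress_field)  # a single-field FieldsContainer also works
segalman_eqv = op.outputs.field()

# Container variant for several tensor fields at once.
op_fc = dpf.operators.invariant.segalman_von_mises_eqv_fc(fields_container=my_stress_fc)
segalman_eqv_fc = op_fc.outputs.fields_container()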
diff --git a/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py b/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py
index 43ad68ec6be..c7c8c79fe2e 100644
--- a/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py
+++ b/src/ansys/dpf/core/operators/invariant/von_mises_eqv.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class von_mises_eqv(Operator):
- """Computes the element-wise Von-Mises criteria on a tensor field.
+ r"""Computes the element-wise Von-Mises criteria on a tensor field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- poisson_ratio : float or int
- Poisson ratio to be used in equivalent strain
- calculation.
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ poisson_ratio: float or int
+ Poisson ratio to be used in equivalent strain calculation.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -60,10 +63,9 @@ def __init__(self, field=None, poisson_ratio=None, config=None, server=None):
self.inputs.poisson_ratio.connect(poisson_ratio)
@staticmethod
- def _spec():
- description = (
- """Computes the element-wise Von-Mises criteria on a tensor field."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Von-Mises criteria on a tensor field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,15 +73,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
13: PinSpecification(
name="poisson_ratio",
type_names=["double", "int32"],
optional=False,
- document="""Poisson ratio to be used in equivalent strain
- calculation.""",
+ document=r"""Poisson ratio to be used in equivalent strain calculation.""",
),
},
map_output_pin_spec={
@@ -87,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eqv", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsVonMisesEqv:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsVonMisesEqv
+ inputs:
+ An instance of InputsVonMisesEqv.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsVonMisesEqv:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsVonMisesEqv
+ outputs:
+ An instance of OutputsVonMisesEqv.
"""
return super().outputs
@@ -152,15 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._poisson_ratio)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +180,15 @@ def field(self):
return self._field
@property
- def poisson_ratio(self):
- """Allows to connect poisson_ratio input to the operator.
+ def poisson_ratio(self) -> Input:
+ r"""Allows to connect poisson_ratio input to the operator.
- Poisson ratio to be used in equivalent strain
- calculation.
+ Poisson ratio to be used in equivalent strain calculation.
- Parameters
- ----------
- my_poisson_ratio : float or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.von_mises_eqv()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py b/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py
index 900eb036580..16b3148f221 100644
--- a/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py
+++ b/src/ansys/dpf/core/operators/invariant/von_mises_eqv_fc.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class von_mises_eqv_fc(Operator):
- """Computes the element-wise Von-Mises criteria on all the tensor fields
- of a fields container.
+ r"""Computes the element-wise Von-Mises criteria on all the tensor fields of
+ a fields container.
+
Parameters
----------
- fields_container : FieldsContainer
- poisson_ratio : float or int
- Poisson ratio to be used in equivalent strain
- calculation.
+ fields_container: FieldsContainer
+ poisson_ratio: float or int
+ Poisson ratio to be used in equivalent strain calculation.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -61,9 +65,10 @@ def __init__(
self.inputs.poisson_ratio.connect(poisson_ratio)
@staticmethod
- def _spec():
- description = """Computes the element-wise Von-Mises criteria on all the tensor fields
- of a fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Von-Mises criteria on all the tensor fields of
+a fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,14 +76,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
13: PinSpecification(
name="poisson_ratio",
type_names=["double", "int32"],
optional=False,
- document="""Poisson ratio to be used in equivalent strain
- calculation.""",
+ document=r"""Poisson ratio to be used in equivalent strain calculation.""",
),
},
map_output_pin_spec={
@@ -86,14 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="eqv_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsVonMisesEqvFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsVonMisesEqvFc
+ inputs:
+ An instance of InputsVonMisesEqvFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsVonMisesEqvFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsVonMisesEqvFc
+ outputs:
+ An instance of OutputsVonMisesEqvFc.
"""
return super().outputs
@@ -151,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._poisson_ratio)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -169,15 +181,15 @@ def fields_container(self):
return self._fields_container
@property
- def poisson_ratio(self):
- """Allows to connect poisson_ratio input to the operator.
+ def poisson_ratio(self) -> Input:
+ r"""Allows to connect poisson_ratio input to the operator.
- Poisson ratio to be used in equivalent strain
- calculation.
+ Poisson ratio to be used in equivalent strain calculation.
- Parameters
- ----------
- my_poisson_ratio : float or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -208,18 +220,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.invariant.von_mises_eqv_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
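A sketch for von_mises_eqv / von_mises_eqv_fc showing the poisson_ratio pin (13), which the specification documents as used in the equivalent strain calculation; the 0.3 value and the inputs are illustrative:

from ansys.dpf import core as dpf

my_strain_field = dpf.Field()         # placeholder elastic strain field
my_strain_fc = dpf.FieldsContainer()  # placeholder container

op = dpf.operators.invariant.von_mises_eqv(field=my_strain_field, poisson_ratio=0.3)
eqv_strain = op.outputs.field()

op_fc = dpf.operators.invariant.von_mises_eqv_fc(
    fields_container=my_strain_fc, poisson_ratio=0.3
)
eqv_strain_fc = op_fc.outputs.fields_container()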
diff --git a/src/ansys/dpf/core/operators/logic/ascending_sort.py b/src/ansys/dpf/core/operators/logic/ascending_sort.py
index 29fc4466db2..69dc61806bc 100644
--- a/src/ansys/dpf/core/operators/logic/ascending_sort.py
+++ b/src/ansys/dpf/core/operators/logic/ascending_sort.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class ascending_sort(Operator):
- """Sort a field (in 0) in ascending order with an optional component
- priority table, or a boolean, to enable sort by scoping (in 1).
- This operator does not support multiple elementary data per
- entity.
+ r"""Sort a field (in 0) in ascending order with an optional component
+ priority table, or a boolean, to enable sort by scoping (in 1). This
+ operator does not support multiple elementary data per entity.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- component_priority_table : optional
- Component priority table (vector of int)
- sort_by_scoping : bool, optional
- If true, uses scoping to sort the field
- (default is false)
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ component_priority_table: optional
+ component priority table (vector of int)
+ sort_by_scoping: bool, optional
+ if true, uses scoping to sort the field (default is false)
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -77,11 +79,11 @@ def __init__(
self.inputs.sort_by_scoping.connect(sort_by_scoping)
@staticmethod
- def _spec():
- description = """Sort a field (in 0) in ascending order with an optional component
- priority table, or a boolean, to enable sort by scoping
- (in 1). This operator does not support multiple elementary
- data per entity."""
+ def _spec() -> Specification:
+ description = r"""Sort a field (in 0) in ascending order with an optional component
+priority table, or a boolean, to enable sort by scoping (in 1). This
+operator does not support multiple elementary data per entity.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,21 +91,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="component_priority_table",
type_names=["vector"],
optional=True,
- document="""Component priority table (vector of int)""",
+ document=r"""component priority table (vector of int)""",
),
2: PinSpecification(
name="sort_by_scoping",
type_names=["bool"],
optional=True,
- document="""If true, uses scoping to sort the field
- (default is false)""",
+ document=r"""if true, uses scoping to sort the field (default is false)""",
),
},
map_output_pin_spec={
@@ -111,14 +111,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -127,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ascending_sort", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAscendingSort:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAscendingSort
+ inputs:
+ An instance of InputsAscendingSort.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAscendingSort:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAscendingSort
+ outputs:
+ An instance of OutputsAscendingSort.
"""
return super().outputs
@@ -182,15 +189,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._sort_by_scoping)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,14 +210,15 @@ def field(self):
return self._field
@property
- def component_priority_table(self):
- """Allows to connect component_priority_table input to the operator.
+ def component_priority_table(self) -> Input:
+ r"""Allows to connect component_priority_table input to the operator.
- Component priority table (vector of int)
+ component priority table (vector of int)
- Parameters
- ----------
- my_component_priority_table :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,15 +231,15 @@ def component_priority_table(self):
return self._component_priority_table
@property
- def sort_by_scoping(self):
- """Allows to connect sort_by_scoping input to the operator.
+ def sort_by_scoping(self) -> Input:
+ r"""Allows to connect sort_by_scoping input to the operator.
- If true, uses scoping to sort the field
- (default is false)
+ if true, uses scoping to sort the field (default is false)
- Parameters
- ----------
- my_sort_by_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,18 +270,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.ascending_sort()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py b/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py
index 40c4685a4e3..1d1b238fdca 100644
--- a/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py
+++ b/src/ansys/dpf/core/operators/logic/ascending_sort_fc.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class ascending_sort_fc(Operator):
- """Sort a field (in 0) in ascending order with an optional component
- priority table, or a boolean, to enable sort by scoping (in 1).
- This operator does not support multiple elementary data per
- entity.
+ r"""Sort a field (in 0) in ascending order with an optional component
+ priority table, or a boolean, to enable sort by scoping (in 1). This
+ operator does not support multiple elementary data per entity.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- component_priority_table : optional
- Component priority table (vector of int)
- sort_by_scoping : bool, optional
- If true, uses scoping to sort the field
- (default is false)
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ component_priority_table: optional
+ component priority table (vector of int)
+ sort_by_scoping: bool, optional
+ if true, uses scoping to sort the field (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -77,11 +79,11 @@ def __init__(
self.inputs.sort_by_scoping.connect(sort_by_scoping)
@staticmethod
- def _spec():
- description = """Sort a field (in 0) in ascending order with an optional component
- priority table, or a boolean, to enable sort by scoping
- (in 1). This operator does not support multiple elementary
- data per entity."""
+ def _spec() -> Specification:
+ description = r"""Sort a field (in 0) in ascending order with an optional component
+priority table, or a boolean, to enable sort by scoping (in 1). This
+operator does not support multiple elementary data per entity.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,21 +91,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="component_priority_table",
type_names=["vector"],
optional=True,
- document="""Component priority table (vector of int)""",
+ document=r"""component priority table (vector of int)""",
),
2: PinSpecification(
name="sort_by_scoping",
type_names=["bool"],
optional=True,
- document="""If true, uses scoping to sort the field
- (default is false)""",
+ document=r"""if true, uses scoping to sort the field (default is false)""",
),
},
map_output_pin_spec={
@@ -111,14 +111,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -127,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ascending_sort_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAscendingSortFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAscendingSortFc
+ inputs:
+ An instance of InputsAscendingSortFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAscendingSortFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAscendingSortFc
+ outputs:
+ An instance of OutputsAscendingSortFc.
"""
return super().outputs
@@ -184,15 +191,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._sort_by_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,14 +212,15 @@ def fields_container(self):
return self._fields_container
@property
- def component_priority_table(self):
- """Allows to connect component_priority_table input to the operator.
+ def component_priority_table(self) -> Input:
+ r"""Allows to connect component_priority_table input to the operator.
- Component priority table (vector of int)
+ component priority table (vector of int)
- Parameters
- ----------
- my_component_priority_table :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,15 +233,15 @@ def component_priority_table(self):
return self._component_priority_table
@property
- def sort_by_scoping(self):
- """Allows to connect sort_by_scoping input to the operator.
+ def sort_by_scoping(self) -> Input:
+ r"""Allows to connect sort_by_scoping input to the operator.
- If true, uses scoping to sort the field
- (default is false)
+ if true, uses scoping to sort the field (default is false)
- Parameters
- ----------
- my_sort_by_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -264,18 +272,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.ascending_sort_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
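A sketch for ascending_sort / ascending_sort_fc; both extra pins are optional, and the component priority table passed below is an illustrative list, not a value taken from this changeset:

from ansys.dpf import core as dpf

my_field = dpf.Field()             # placeholder field to sort
my_fc = dpf.FieldsContainer()      # placeholder container

# sort_by_scoping=True uses the scoping to sort the field (defaults to False).
op = dpf.operators.logic.ascending_sort(field=my_field, sort_by_scoping=True)
sorted_field = op.outputs.field()

# Container variant with an optional component priority table (vector of int).
op_fc = dpf.operators.logic.ascending_sort_fc(fields_container=my_fc)
op_fc.inputs.component_priority_table.connect([1, 0, 2])
sorted_fc = op_fc.outputs.fields_container()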
diff --git a/src/ansys/dpf/core/operators/logic/component_selector.py b/src/ansys/dpf/core/operators/logic/component_selector.py
index 74859425044..2be6cb88cbc 100644
--- a/src/ansys/dpf/core/operators/logic/component_selector.py
+++ b/src/ansys/dpf/core/operators/logic/component_selector.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_selector(Operator):
- """Creates a scalar/vector field based on the selected component.
+ r"""Creates a scalar/vector field based on the selected component.
+
Parameters
----------
- field : Field or FieldsContainer
- component_number : int
- One or several component index that will be
- extracted from the initial field.
- default_value : float, optional
- Set a default value for components that do
- not exist.
+ field: Field or FieldsContainer
+ component_number: int
+ One or several component index that will be extracted from the initial field.
+ default_value: float, optional
+ Set a default value for components that do not exist.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -73,10 +76,9 @@ def __init__(
self.inputs.default_value.connect(default_value)
@staticmethod
- def _spec():
- description = (
- """Creates a scalar/vector field based on the selected component."""
- )
+ def _spec() -> Specification:
+ description = r"""Creates a scalar/vector field based on the selected component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -84,21 +86,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="component_number",
type_names=["int32", "vector"],
optional=False,
- document="""One or several component index that will be
- extracted from the initial field.""",
+ document=r"""One or several component index that will be extracted from the initial field.""",
),
2: PinSpecification(
name="default_value",
type_names=["double"],
optional=True,
- document="""Set a default value for components that do
- not exist.""",
+ document=r"""Set a default value for components that do not exist.""",
),
},
map_output_pin_spec={
@@ -106,14 +106,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -122,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_selector", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentSelector:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentSelector
+ inputs:
+ An instance of InputsComponentSelector.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentSelector:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentSelector
+ outputs:
+ An instance of OutputsComponentSelector.
"""
return super().outputs
@@ -177,12 +184,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._default_value)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,15 +203,15 @@ def field(self):
return self._field
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- One or several component index that will be
- extracted from the initial field.
+ One or several component indices that will be extracted from the initial field.
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -216,15 +224,15 @@ def component_number(self):
return self._component_number
@property
- def default_value(self):
- """Allows to connect default_value input to the operator.
+ def default_value(self) -> Input:
+ r"""Allows to connect default_value input to the operator.
- Set a default value for components that do
- not exist.
+ Set a default value for components that do not exist.
- Parameters
- ----------
- my_default_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -255,18 +263,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_selector()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
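
A minimal usage sketch for the component_selector operator documented above; ``my_field`` is an illustrative placeholder for an existing DPF Field and is not defined in this patch.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_selector()
>>> op.inputs.field.connect(my_field)        # hypothetical vector Field
>>> op.inputs.component_number.connect(0)    # keep only component 0
>>> op.inputs.default_value.connect(0.0)     # fallback for components that do not exist
>>> scalar_field = op.outputs.field()
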
diff --git a/src/ansys/dpf/core/operators/logic/component_selector_fc.py b/src/ansys/dpf/core/operators/logic/component_selector_fc.py
index cd1a1085b2e..7d9e3cbb1e8 100644
--- a/src/ansys/dpf/core/operators/logic/component_selector_fc.py
+++ b/src/ansys/dpf/core/operators/logic/component_selector_fc.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_selector_fc(Operator):
- """Creates a scalar fields container based on the selected component for
+ r"""Creates a scalar fields container based on the selected component for
each field.
+
Parameters
----------
- fields_container : FieldsContainer or Field
- component_number : int
- One or several component index that will be
- extracted from the initial field.
+ fields_container: FieldsContainer or Field
+ component_number: int
+ one or several component indices that will be extracted from the initial field.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -61,9 +65,10 @@ def __init__(
self.inputs.component_number.connect(component_number)
@staticmethod
- def _spec():
- description = """Creates a scalar fields container based on the selected component for
- each field."""
+ def _spec() -> Specification:
+ description = r"""Creates a scalar fields container based on the selected component for
+each field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,14 +76,13 @@ def _spec():
name="fields_container",
type_names=["fields_container", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="component_number",
type_names=["int32", "vector"],
optional=False,
- document="""One or several component index that will be
- extracted from the initial field.""",
+ document=r"""one or several component indices that will be extracted from the initial field.""",
),
},
map_output_pin_spec={
@@ -86,14 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_selector_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentSelectorFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentSelectorFc
+ inputs:
+ An instance of InputsComponentSelectorFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentSelectorFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentSelectorFc
+ outputs:
+ An instance of OutputsComponentSelectorFc.
"""
return super().outputs
@@ -155,12 +166,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._component_number)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +185,15 @@ def fields_container(self):
return self._fields_container
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- One or several component index that will be
- extracted from the initial field.
+ one or several component indices that will be extracted from the initial field.
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,18 +226,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_selector_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
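
A minimal usage sketch for component_selector_fc; ``my_fields_container`` is an illustrative placeholder for an existing FieldsContainer, not defined in this patch.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_selector_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.component_number.connect(1)    # component 1 of each field in the container
>>> scalar_fc = op.outputs.fields_container()
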
diff --git a/src/ansys/dpf/core/operators/logic/component_transformer.py b/src/ansys/dpf/core/operators/logic/component_transformer.py
index 1f0028999ad..bc245139b8a 100644
--- a/src/ansys/dpf/core/operators/logic/component_transformer.py
+++ b/src/ansys/dpf/core/operators/logic/component_transformer.py
@@ -4,29 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_transformer(Operator):
- """Takes the input field and creates a field with overriden value on
- given components.
+ r"""Takes the input field and creates a field with overridden value on given
+ components.
+
Parameters
----------
- field : Field or FieldsContainer
- component_number : int
- One or several component index that will be
- modified from the initial field.
- default_value : float, optional
+ field: Field or FieldsContainer
+ component_number: int
+ One or several component indices that will be modified from the initial field.
+ default_value: float, optional
Set a default value for components selected.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -73,9 +77,10 @@ def __init__(
self.inputs.default_value.connect(default_value)
@staticmethod
- def _spec():
- description = """Takes the input field and creates a field with overriden value on
- given components."""
+ def _spec() -> Specification:
+ description = r"""Takes the input field and creates a field with overridden value on given
+components.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,20 +88,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="component_number",
type_names=["int32", "vector"],
optional=False,
- document="""One or several component index that will be
- modified from the initial field.""",
+ document=r"""One or several component indices that will be modified from the initial field.""",
),
2: PinSpecification(
name="default_value",
type_names=["double"],
optional=True,
- document="""Set a default value for components selected.""",
+ document=r"""Set a default value for components selected.""",
),
},
map_output_pin_spec={
@@ -104,14 +108,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_transformer", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentTransformer:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentTransformer
+ inputs:
+ An instance of InputsComponentTransformer.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentTransformer:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentTransformer
+ outputs:
+ An instance of OutputsComponentTransformer.
"""
return super().outputs
@@ -177,12 +188,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._default_value)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,15 +207,15 @@ def field(self):
return self._field
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- One or several component index that will be
- modified from the initial field.
+ One or several component indices that will be modified from the initial field.
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -216,14 +228,15 @@ def component_number(self):
return self._component_number
@property
- def default_value(self):
- """Allows to connect default_value input to the operator.
+ def default_value(self) -> Input:
+ r"""Allows to connect default_value input to the operator.
Set a default value for components selected.
- Parameters
- ----------
- my_default_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -254,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_transformer()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
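
A minimal usage sketch for component_transformer; ``my_field`` is an illustrative placeholder for an existing Field, here with components 1 and 2 overridden by 0.0.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_transformer()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.component_number.connect([1, 2])   # components to override
>>> op.inputs.default_value.connect(0.0)         # value written on those components
>>> transformed_field = op.outputs.field()
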
diff --git a/src/ansys/dpf/core/operators/logic/component_transformer_fc.py b/src/ansys/dpf/core/operators/logic/component_transformer_fc.py
index 3a170d6f756..436b5832b86 100644
--- a/src/ansys/dpf/core/operators/logic/component_transformer_fc.py
+++ b/src/ansys/dpf/core/operators/logic/component_transformer_fc.py
@@ -4,29 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_transformer_fc(Operator):
- """Takes the input field and creates a field with overriden value on
- given components.
+ r"""Takes the input field and creates a field with overridden value on given
+ components.
+
Parameters
----------
- fields_container : FieldsContainer
- component_number : int
- One or several component index that will be
- modified from the initial field.
- default_value : float, optional
+ fields_container: FieldsContainer
+ component_number: int
+ One or several component indices that will be modified from the initial field.
+ default_value: float, optional
Set a default value for components selected.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -73,9 +77,10 @@ def __init__(
self.inputs.default_value.connect(default_value)
@staticmethod
- def _spec():
- description = """Takes the input field and creates a field with overriden value on
- given components."""
+ def _spec() -> Specification:
+ description = r"""Takes the input field and creates a field with overridden value on given
+components.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,20 +88,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="component_number",
type_names=["int32", "vector"],
optional=False,
- document="""One or several component index that will be
- modified from the initial field.""",
+ document=r"""One or several component indices that will be modified from the initial field.""",
),
2: PinSpecification(
name="default_value",
type_names=["double"],
optional=True,
- document="""Set a default value for components selected.""",
+ document=r"""Set a default value for components selected.""",
),
},
map_output_pin_spec={
@@ -104,14 +108,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_transformer_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentTransformerFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentTransformerFc
+ inputs:
+ An instance of InputsComponentTransformerFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentTransformerFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentTransformerFc
+ outputs:
+ An instance of OutputsComponentTransformerFc.
"""
return super().outputs
@@ -179,12 +190,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._default_value)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -197,15 +209,15 @@ def fields_container(self):
return self._fields_container
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- One or several component index that will be
- modified from the initial field.
+ One or several component indices that will be modified from the initial field.
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -218,14 +230,15 @@ def component_number(self):
return self._component_number
@property
- def default_value(self):
- """Allows to connect default_value input to the operator.
+ def default_value(self) -> Input:
+ r"""Allows to connect default_value input to the operator.
Set a default value for components selected.
- Parameters
- ----------
- my_default_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +271,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_transformer_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
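
A minimal usage sketch for component_transformer_fc; ``my_fields_container`` is an illustrative placeholder for an existing FieldsContainer.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.component_transformer_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.component_number.connect(0)    # component to override in each field
>>> op.inputs.default_value.connect(1.0)     # value written on that component
>>> transformed_fc = op.outputs.fields_container()
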
diff --git a/src/ansys/dpf/core/operators/logic/descending_sort.py b/src/ansys/dpf/core/operators/logic/descending_sort.py
index df948a98c0d..31de4366931 100644
--- a/src/ansys/dpf/core/operators/logic/descending_sort.py
+++ b/src/ansys/dpf/core/operators/logic/descending_sort.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class descending_sort(Operator):
- """Sort a field (in 0) in descending order, with an optional component
+ r"""Sort a field (in 0) in descending order, with an optional component
priority table or a boolean to enable sort by scoping (in 1). This
- operator doesn't support multiple elementary data per entity.
+ operator doesn’t support multiple elementary data per entity.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- component_priority_table : optional
- Component priority table (vector of int)
- sort_by_scoping : bool, optional
- If true, uses scoping to sort the field
- (default is false)
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ component_priority_table: optional
+ component priority table (vector of int)
+ sort_by_scoping: bool, optional
+ if true, uses scoping to sort the field (default is false)
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -76,11 +79,11 @@ def __init__(
self.inputs.sort_by_scoping.connect(sort_by_scoping)
@staticmethod
- def _spec():
- description = """Sort a field (in 0) in descending order, with an optional component
- priority table or a boolean to enable sort by scoping (in
- 1). This operator doesn't support multiple elementary data
- per entity."""
+ def _spec() -> Specification:
+ description = r"""Sort a field (in 0) in descending order, with an optional component
+priority table or a boolean to enable sort by scoping (in 1). This
+operator doesn’t support multiple elementary data per entity.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,21 +91,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="component_priority_table",
type_names=["vector"],
optional=True,
- document="""Component priority table (vector of int)""",
+ document=r"""component priority table (vector of int)""",
),
2: PinSpecification(
name="sort_by_scoping",
type_names=["bool"],
optional=True,
- document="""If true, uses scoping to sort the field
- (default is false)""",
+ document=r"""if true, uses scoping to sort the field (default is false)""",
),
},
map_output_pin_spec={
@@ -110,14 +111,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="descending_sort", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDescendingSort:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDescendingSort
+ inputs:
+ An instance of InputsDescendingSort.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDescendingSort:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDescendingSort
+ outputs:
+ An instance of OutputsDescendingSort.
"""
return super().outputs
@@ -181,15 +189,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._sort_by_scoping)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,14 +210,15 @@ def field(self):
return self._field
@property
- def component_priority_table(self):
- """Allows to connect component_priority_table input to the operator.
+ def component_priority_table(self) -> Input:
+ r"""Allows to connect component_priority_table input to the operator.
- Component priority table (vector of int)
+ component priority table (vector of int)
- Parameters
- ----------
- my_component_priority_table :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,15 +231,15 @@ def component_priority_table(self):
return self._component_priority_table
@property
- def sort_by_scoping(self):
- """Allows to connect sort_by_scoping input to the operator.
+ def sort_by_scoping(self) -> Input:
+ r"""Allows to connect sort_by_scoping input to the operator.
- If true, uses scoping to sort the field
- (default is false)
+ if true, uses scoping to sort the field (default is false)
- Parameters
- ----------
- my_sort_by_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,18 +270,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.descending_sort()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
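
A minimal usage sketch for descending_sort; ``my_field`` is an illustrative placeholder for an existing Field, and the optional pins may be left unconnected.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.descending_sort()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.sort_by_scoping.connect(True)   # optional: if True, the scoping is used to sort the field
>>> sorted_field = op.outputs.field()
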
diff --git a/src/ansys/dpf/core/operators/logic/descending_sort_fc.py b/src/ansys/dpf/core/operators/logic/descending_sort_fc.py
index 34ad651051d..84d6a7b655b 100644
--- a/src/ansys/dpf/core/operators/logic/descending_sort_fc.py
+++ b/src/ansys/dpf/core/operators/logic/descending_sort_fc.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class descending_sort_fc(Operator):
- """Sort a field (in 0) in descending order, with an optional component
+ r"""Sort a field (in 0) in descending order, with an optional component
priority table or a boolean to enable sort by scoping (in 1). This
- operator doesn't support multiple elementary data per entity.
+ operator doesn’t support multiple elementary data per entity.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- component_priority_table : optional
- Component priority table (vector of int)
- sort_by_scoping : bool, optional
- If true, uses scoping to sort the field
- (default is false)
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ component_priority_table: optional
+ component priority table (vector of int)
+ sort_by_scoping: bool, optional
+ if true, uses scoping to sort the field (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -76,11 +79,11 @@ def __init__(
self.inputs.sort_by_scoping.connect(sort_by_scoping)
@staticmethod
- def _spec():
- description = """Sort a field (in 0) in descending order, with an optional component
- priority table or a boolean to enable sort by scoping (in
- 1). This operator doesn't support multiple elementary data
- per entity."""
+ def _spec() -> Specification:
+ description = r"""Sort a field (in 0) in descending order, with an optional component
+priority table or a boolean to enable sort by scoping (in 1). This
+operator doesn’t support multiple elementary data per entity.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,21 +91,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="component_priority_table",
type_names=["vector"],
optional=True,
- document="""Component priority table (vector of int)""",
+ document=r"""component priority table (vector of int)""",
),
2: PinSpecification(
name="sort_by_scoping",
type_names=["bool"],
optional=True,
- document="""If true, uses scoping to sort the field
- (default is false)""",
+ document=r"""if true, uses scoping to sort the field (default is false)""",
),
},
map_output_pin_spec={
@@ -110,14 +111,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="descending_sort_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDescendingSortFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDescendingSortFc
+ inputs:
+ An instance of InputsDescendingSortFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDescendingSortFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDescendingSortFc
+ outputs:
+ An instance of OutputsDescendingSortFc.
"""
return super().outputs
@@ -185,15 +193,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._sort_by_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,14 +214,15 @@ def fields_container(self):
return self._fields_container
@property
- def component_priority_table(self):
- """Allows to connect component_priority_table input to the operator.
+ def component_priority_table(self) -> Input:
+ r"""Allows to connect component_priority_table input to the operator.
- Component priority table (vector of int)
+ component priority table (vector of int)
- Parameters
- ----------
- my_component_priority_table :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,15 +235,15 @@ def component_priority_table(self):
return self._component_priority_table
@property
- def sort_by_scoping(self):
- """Allows to connect sort_by_scoping input to the operator.
+ def sort_by_scoping(self) -> Input:
+ r"""Allows to connect sort_by_scoping input to the operator.
- If true, uses scoping to sort the field
- (default is false)
+ if true, uses scoping to sort the field (default is false)
- Parameters
- ----------
- my_sort_by_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,18 +274,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.descending_sort_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
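
A minimal usage sketch for descending_sort_fc; ``my_fields_container`` is an illustrative placeholder for an existing FieldsContainer.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.descending_sort_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.component_priority_table.connect([2, 1, 0])   # optional: component priority table (vector of int)
>>> sorted_fc = op.outputs.fields_container()
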
diff --git a/src/ansys/dpf/core/operators/logic/elementary_data_selector.py b/src/ansys/dpf/core/operators/logic/elementary_data_selector.py
index a09e057f278..f841ac7e852 100644
--- a/src/ansys/dpf/core/operators/logic/elementary_data_selector.py
+++ b/src/ansys/dpf/core/operators/logic/elementary_data_selector.py
@@ -4,34 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elementary_data_selector(Operator):
- """Creates a scalar/vector field based on the selected elementary data.
+ r"""Creates a scalar/vector field based on the selected elementary data.
+
Parameters
----------
- field : Field or FieldsContainer
- elementary_data_index : int
- One or several elementary data index that
- will be extracted from the initial
- field. for field with nature matrix,
- this is the line indices to extract.
- default_value : float, optional
- Set a default value for elementary data that
- do not exist.
- elementary_data_index_2 : int, optional
- For field with nature matrix, this is the
- column indices to extract.
+ field: Field or FieldsContainer
+ elementary_data_index: int
+ One or several elementary data indices that will be extracted from the initial field. For a field with nature matrix, these are the line indices to extract.
+ default_value: float, optional
+ Set a default value for elementary data that do not exist.
+ elementary_data_index_2: int, optional
+ For a field with nature matrix, these are the column indices to extract.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -84,10 +84,9 @@ def __init__(
self.inputs.elementary_data_index_2.connect(elementary_data_index_2)
@staticmethod
- def _spec():
- description = (
- """Creates a scalar/vector field based on the selected elementary data."""
- )
+ def _spec() -> Specification:
+ description = r"""Creates a scalar/vector field based on the selected elementary data.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -95,30 +94,25 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="elementary_data_index",
type_names=["int32", "vector"],
optional=False,
- document="""One or several elementary data index that
- will be extracted from the initial
- field. for field with nature matrix,
- this is the line indices to extract.""",
+ document=r"""One or several elementary data indices that will be extracted from the initial field. For a field with nature matrix, these are the line indices to extract.""",
),
2: PinSpecification(
name="default_value",
type_names=["double"],
optional=True,
- document="""Set a default value for elementary data that
- do not exist.""",
+ document=r"""Set a default value for elementary data that do not exist.""",
),
3: PinSpecification(
name="elementary_data_index_2",
type_names=["int32", "vector"],
optional=True,
- document="""For field with nature matrix, this is the
- column indices to extract.""",
+ document=r"""For a field with nature matrix, these are the column indices to extract.""",
),
},
map_output_pin_spec={
@@ -126,14 +120,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -142,29 +136,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="elementary_data_selector", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementaryDataSelector:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementaryDataSelector
+ inputs:
+ An instance of InputsElementaryDataSelector.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementaryDataSelector:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementaryDataSelector
+ outputs:
+ An instance of OutputsElementaryDataSelector.
"""
return super().outputs
@@ -205,12 +206,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._elementary_data_index_2)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,17 +225,15 @@ def field(self):
return self._field
@property
- def elementary_data_index(self):
- """Allows to connect elementary_data_index input to the operator.
+ def elementary_data_index(self) -> Input:
+ r"""Allows to connect elementary_data_index input to the operator.
- One or several elementary data index that
- will be extracted from the initial
- field. for field with nature matrix,
- this is the line indices to extract.
+ One or several elementary data indices that will be extracted from the initial field. For a field with nature matrix, these are the line indices to extract.
- Parameters
- ----------
- my_elementary_data_index : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -246,15 +246,15 @@ def elementary_data_index(self):
return self._elementary_data_index
@property
- def default_value(self):
- """Allows to connect default_value input to the operator.
+ def default_value(self) -> Input:
+ r"""Allows to connect default_value input to the operator.
- Set a default value for elementary data that
- do not exist.
+ Set a default value for elementary data that do not exist.
- Parameters
- ----------
- my_default_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,15 +267,15 @@ def default_value(self):
return self._default_value
@property
- def elementary_data_index_2(self):
- """Allows to connect elementary_data_index_2 input to the operator.
+ def elementary_data_index_2(self) -> Input:
+ r"""Allows to connect elementary_data_index_2 input to the operator.
- For field with nature matrix, this is the
- column indices to extract.
+ For a field with nature matrix, these are the column indices to extract.
- Parameters
- ----------
- my_elementary_data_index_2 : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -306,18 +306,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.elementary_data_selector()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
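
A minimal usage sketch for elementary_data_selector; ``my_field`` is an illustrative placeholder for an existing Field.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.elementary_data_selector()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.elementary_data_index.connect([0, 2])   # elementary data (or matrix line) indices to extract
>>> selected_field = op.outputs.field()
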
diff --git a/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py b/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py
index 039b2fa6912..384de7f9b10 100644
--- a/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py
+++ b/src/ansys/dpf/core/operators/logic/elementary_data_selector_fc.py
@@ -4,33 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elementary_data_selector_fc(Operator):
- """Creates a scalar fields container based on the selected elementary
- data for each field.
+ r"""Creates a scalar fields container based on the selected elementary data
+ for each field.
+
Parameters
----------
- fields_container : FieldsContainer or Field
- elementary_data_index : int
- One or several elementary data indices that
- will be extracted from the initial
- field. for a field with a nature
- matrix, this extracts the line
- indices.
- elementary_data_index_2 : int, optional
- For a field with nature matrix, this extracts
- the column indices.
+ fields_container: FieldsContainer or Field
+ elementary_data_index: int
+ one or several elementary data indices that will be extracted from the initial field. For a field with a nature matrix, this extracts the line indices.
+ elementary_data_index_2: int, optional
+ For a field with nature matrix, this extracts the column indices.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -79,9 +79,10 @@ def __init__(
self.inputs.elementary_data_index_2.connect(elementary_data_index_2)
@staticmethod
- def _spec():
- description = """Creates a scalar fields container based on the selected elementary
- data for each field."""
+ def _spec() -> Specification:
+ description = r"""Creates a scalar fields container based on the selected elementary data
+for each field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,24 +90,19 @@ def _spec():
name="fields_container",
type_names=["fields_container", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="elementary_data_index",
type_names=["int32", "vector"],
optional=False,
- document="""One or several elementary data indices that
- will be extracted from the initial
- field. for a field with a nature
- matrix, this extracts the line
- indices.""",
+ document=r"""one or several elementary data indices that will be extracted from the initial field. For a field with a nature matrix, this extracts the line indices.""",
),
3: PinSpecification(
name="elementary_data_index_2",
type_names=["int32", "vector"],
optional=True,
- document="""For a field with nature matrix, this extracts
- the column indices.""",
+ document=r"""For a field with nature matrix, this extracts the column indices.""",
),
},
map_output_pin_spec={
@@ -114,14 +110,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -130,31 +126,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="elementary_data_selector_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementaryDataSelectorFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementaryDataSelectorFc
+ inputs:
+ An instance of InputsElementaryDataSelectorFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementaryDataSelectorFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementaryDataSelectorFc
+ outputs:
+ An instance of OutputsElementaryDataSelectorFc.
"""
return super().outputs
@@ -191,12 +194,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._elementary_data_index_2)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -209,18 +213,15 @@ def fields_container(self):
return self._fields_container
@property
- def elementary_data_index(self):
- """Allows to connect elementary_data_index input to the operator.
+ def elementary_data_index(self) -> Input:
+ r"""Allows to connect elementary_data_index input to the operator.
- One or several elementary data indices that
- will be extracted from the initial
- field. for a field with a nature
- matrix, this extracts the line
- indices.
+ one or several elementary data indices that will be extracted from the initial field. For a field with a nature matrix, this extracts the line indices.
- Parameters
- ----------
- my_elementary_data_index : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,15 +234,15 @@ def elementary_data_index(self):
return self._elementary_data_index
@property
- def elementary_data_index_2(self):
- """Allows to connect elementary_data_index_2 input to the operator.
+ def elementary_data_index_2(self) -> Input:
+ r"""Allows to connect elementary_data_index_2 input to the operator.
- For a field with nature matrix, this extracts
- the column indices.
+ For a field with nature matrix, this extracts the column indices.
- Parameters
- ----------
- my_elementary_data_index_2 : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,18 +275,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.elementary_data_selector_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
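
A minimal usage sketch for elementary_data_selector_fc; ``my_fields_container`` is an illustrative placeholder for an existing FieldsContainer.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.elementary_data_selector_fc()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.elementary_data_index.connect(0)   # elementary data index extracted from each field
>>> selected_fc = op.outputs.fields_container()
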
diff --git a/src/ansys/dpf/core/operators/logic/enrich_materials.py b/src/ansys/dpf/core/operators/logic/enrich_materials.py
index e2b3c846bb9..67c0c225758 100644
--- a/src/ansys/dpf/core/operators/logic/enrich_materials.py
+++ b/src/ansys/dpf/core/operators/logic/enrich_materials.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class enrich_materials(Operator):
- """Takes a MaterialContainer and a stream and enriches the
+ r"""Takes a MaterialContainer and a stream and enriches the
MaterialContainer using stream data.
+
Parameters
----------
- MaterialContainer :
- streams : StreamsContainer or FieldsContainer
- streams_mapping : PropertyFieldsContainer
+ MaterialContainer:
+ streams: StreamsContainer or FieldsContainer
+ streams_mapping: PropertyFieldsContainer
Returns
-------
- MaterialContainer : bool
+ MaterialContainer: bool
Examples
--------
@@ -70,9 +75,10 @@ def __init__(
self.inputs.streams_mapping.connect(streams_mapping)
@staticmethod
- def _spec():
- description = """Takes a MaterialContainer and a stream and enriches the
- MaterialContainer using stream data."""
+ def _spec() -> Specification:
+ description = r"""Takes a MaterialContainer and a stream and enriches the
+MaterialContainer using stream data.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="MaterialContainer",
type_names=["any"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="streams",
type_names=["streams_container", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="streams_mapping",
type_names=["property_fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="MaterialContainer",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="enrich_materials", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEnrichMaterials:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEnrichMaterials
+ inputs:
+ An instance of InputsEnrichMaterials.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEnrichMaterials:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEnrichMaterials
+ outputs:
+ An instance of OutputsEnrichMaterials.
"""
return super().outputs
@@ -171,12 +184,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._streams_mapping)
@property
- def MaterialContainer(self):
- """Allows to connect MaterialContainer input to the operator.
+ def MaterialContainer(self) -> Input:
+ r"""Allows to connect MaterialContainer input to the operator.
- Parameters
- ----------
- my_MaterialContainer :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,12 +203,13 @@ def MaterialContainer(self):
return self._MaterialContainer
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
- Parameters
- ----------
- my_streams : StreamsContainer or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,12 +222,13 @@ def streams(self):
return self._streams
@property
- def streams_mapping(self):
- """Allows to connect streams_mapping input to the operator.
+ def streams_mapping(self) -> Input:
+ r"""Allows to connect streams_mapping input to the operator.
- Parameters
- ----------
- my_streams_mapping : PropertyFieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -243,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._MaterialContainer)
@property
- def MaterialContainer(self):
- """Allows to get MaterialContainer output of the operator
+ def MaterialContainer(self) -> Output:
+ r"""Allows to get MaterialContainer output of the operator
Returns
- ----------
- my_MaterialContainer : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.enrich_materials()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_MaterialContainer = op.outputs.MaterialContainer()
- """ # noqa: E501
+ """
return self._MaterialContainer
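
A minimal usage sketch for enrich_materials; the three connected variables are illustrative placeholders for an existing material container, a StreamsContainer, and its mapping, none of which are defined in this patch.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.enrich_materials()
>>> op.inputs.MaterialContainer.connect(my_material_container)   # hypothetical material container
>>> op.inputs.streams.connect(my_streams)                        # hypothetical StreamsContainer
>>> op.inputs.streams_mapping.connect(my_streams_mapping)        # hypothetical PropertyFieldsContainer
>>> enriched = op.outputs.MaterialContainer()                    # bool output
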
diff --git a/src/ansys/dpf/core/operators/logic/identical_anys.py b/src/ansys/dpf/core/operators/logic/identical_anys.py
index fc17ef992df..975e9b94416 100644
--- a/src/ansys/dpf/core/operators/logic/identical_anys.py
+++ b/src/ansys/dpf/core/operators/logic/identical_anys.py
@@ -4,50 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_anys(Operator):
- """Takes two Any objects and compares them.Supported types: Field,
+ r"""Takes two Any objects and compares them. Supported types: Field,
FieldsContainer, Mesh, MeshesContainer, PropertyField,
PropertyFieldsContainer, Scoping, ScopingsContainer, StringField,
- standard types (double, int, string, vector of int, doubles,
- string). Note: all inputs related to fields, mesh, and so on are
- passed to each property check.
+ standard types (double, int, string, vector of int, doubles, string).
+ Note: all inputs related to fields, mesh, and so on are passed to each
+ property check.
+
Parameters
----------
- anyA : Any
- anyB : Any
- double_value : float
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
- double_tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
- compare_auxiliary : bool
- For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ anyA: Any
+ anyB: Any
+ double_value: float
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null (default value: 1.0e-14).
+ double_tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
+ compare_auxiliary: bool
+ For meshes and meshescontainer: compare auxiliary data (i.e. property fields, scopings...). Default value is 'false'.
Returns
-------
- included : bool
- Bool (true if belongs...)
- message : str
+ included: bool
+ bool (true if belongs...)
+ message: str
Examples
--------
@@ -107,13 +99,14 @@ def __init__(
self.inputs.compare_auxiliary.connect(compare_auxiliary)
@staticmethod
- def _spec():
- description = """Takes two Any objects and compares them.Supported types: Field,
- FieldsContainer, Mesh, MeshesContainer, PropertyField,
- PropertyFieldsContainer, Scoping, ScopingsContainer,
- StringField, standard types (double, int, string, vector
- of int, doubles, string). Note: all inputs related to
- fields, mesh, and so on are passed to each property check."""
+ def _spec() -> Specification:
+ description = r"""Takes two Any objects and compares them. Supported types: Field,
+FieldsContainer, Mesh, MeshesContainer, PropertyField,
+PropertyFieldsContainer, Scoping, ScopingsContainer, StringField,
+standard types (double, int, string, vector of int, doubles, string).
+Note: all inputs related to fields, mesh, and so on are passed to each
+property check.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -121,44 +114,31 @@ def _spec():
name="anyA",
type_names=["any"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="anyB",
type_names=["any"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="double_value",
type_names=["double"],
optional=False,
- document="""Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).""",
+ document=r"""Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).""",
),
3: PinSpecification(
name="double_tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.""",
),
4: PinSpecification(
name="compare_auxiliary",
type_names=["bool"],
optional=False,
- document="""For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.""",
+ document=r"""For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.""",
),
},
map_output_pin_spec={
@@ -166,20 +146,20 @@ def _spec():
name="included",
type_names=["bool"],
optional=False,
- document="""Bool (true if belongs...)""",
+ document=r"""bool (true if belongs...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -188,29 +168,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::any", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalAnys:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalAnys
+ inputs:
+ An instance of InputsIdenticalAnys.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalAnys:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalAnys
+ outputs:
+ An instance of OutputsIdenticalAnys.
"""
return super().outputs
@@ -249,12 +236,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compare_auxiliary)
@property
- def anyA(self):
- """Allows to connect anyA input to the operator.
+ def anyA(self) -> Input:
+ r"""Allows to connect anyA input to the operator.
- Parameters
- ----------
- my_anyA : Any
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,12 +255,13 @@ def anyA(self):
return self._anyA
@property
- def anyB(self):
- """Allows to connect anyB input to the operator.
+ def anyB(self) -> Input:
+ r"""Allows to connect anyB input to the operator.
- Parameters
- ----------
- my_anyB : Any
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -285,19 +274,15 @@ def anyB(self):
return self._anyB
@property
- def double_value(self):
- """Allows to connect double_value input to the operator.
+ def double_value(self) -> Input:
+ r"""Allows to connect double_value input to the operator.
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).
- Parameters
- ----------
- my_double_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -310,19 +295,15 @@ def double_value(self):
return self._double_value
@property
- def double_tolerance(self):
- """Allows to connect double_tolerance input to the operator.
+ def double_tolerance(self) -> Input:
+ r"""Allows to connect double_tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
- Parameters
- ----------
- my_double_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -335,17 +316,15 @@ def double_tolerance(self):
return self._double_tolerance
@property
- def compare_auxiliary(self):
- """Allows to connect compare_auxiliary input to the operator.
+ def compare_auxiliary(self) -> Input:
+ r"""Allows to connect compare_auxiliary input to the operator.
- For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
- Parameters
- ----------
- my_compare_auxiliary : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,35 +358,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def included(self):
- """Allows to get included output of the operator
+ def included(self) -> Output:
+ r"""Allows to get included output of the operator
+
+ bool (true if belongs...)
Returns
- ----------
- my_included : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_anys()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_included = op.outputs.included()
- """ # noqa: E501
+ """
return self._included
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_anys()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
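For reference, a minimal usage sketch of the pins documented above for ``identical_anys``. The names ``left`` and ``right`` are illustrative placeholders for any two supported DPF objects (for example two ``Field`` instances) and are not part of this change:

    from ansys.dpf import core as dpf

    # left and right are assumed to already exist and hold two comparable DPF
    # objects (Field, FieldsContainer, MeshedRegion, Scoping, ...).
    op = dpf.operators.logic.identical_anys()
    op.inputs.anyA.connect(left)
    op.inputs.anyB.connect(right)
    op.inputs.double_value.connect(1.0e-14)     # absolute values below this are treated as null
    op.inputs.double_tolerance.connect(0.001)   # relative tolerance (v1-v2)/v2
    op.inputs.compare_auxiliary.connect(False)  # skip property fields, scopings, ...

    are_same = op.outputs.included()  # bool
    details = op.outputs.message()    # string describing the comparison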
diff --git a/src/ansys/dpf/core/operators/logic/identical_fc.py b/src/ansys/dpf/core/operators/logic/identical_fc.py
index 5542af0bff1..9cc305f1101 100644
--- a/src/ansys/dpf/core/operators/logic/identical_fc.py
+++ b/src/ansys/dpf/core/operators/logic/identical_fc.py
@@ -4,39 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_fc(Operator):
- """Checks if two fields_container are identical.
+ r"""Checks if two fields_container are identical.
+
Parameters
----------
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
- small_value : float, optional
- Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).
- tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
+ small_value: float, optional
+ Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).
+ tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -90,8 +86,9 @@ def __init__(
self.inputs.tolerance.connect(tolerance)
@staticmethod
- def _spec():
- description = """Checks if two fields_container are identical."""
+ def _spec() -> Specification:
+ description = r"""Checks if two fields_container are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -99,34 +96,25 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="small_value",
type_names=["double"],
optional=True,
- document="""Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).""",
+ document=r"""Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).""",
),
3: PinSpecification(
name="tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).""",
),
},
map_output_pin_spec={
@@ -134,20 +122,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -156,29 +144,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="AreFieldsIdentical_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalFc
+ inputs:
+ An instance of InputsIdenticalFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalFc
+ outputs:
+ An instance of OutputsIdenticalFc.
"""
return super().outputs
@@ -213,12 +208,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._tolerance)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,12 +227,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,19 +246,15 @@ def fields_containerB(self):
return self._fields_containerB
@property
- def small_value(self):
- """Allows to connect small_value input to the operator.
+ def small_value(self) -> Input:
+ r"""Allows to connect small_value input to the operator.
- Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).
+ Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).
- Parameters
- ----------
- my_small_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,18 +267,15 @@ def small_value(self):
return self._small_value
@property
- def tolerance(self):
- """Allows to connect tolerance input to the operator.
+ def tolerance(self) -> Input:
+ r"""Allows to connect tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).
- Parameters
- ----------
- my_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,35 +309,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
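The same wiring applies to ``identical_fc``, shown here with the constructor keywords listed in its Parameters section. ``fc_a`` and ``fc_b`` are assumed, pre-existing ``FieldsContainer`` objects (illustrative names only):

    from ansys.dpf import core as dpf

    # fc_a and fc_b are assumed to be FieldsContainer objects, e.g. two result sets.
    op = dpf.operators.logic.identical_fc(
        fields_containerA=fc_a,
        fields_containerB=fc_b,
        small_value=1.0e-14,  # optional pin 2
        tolerance=0.001,      # optional pin 3
    )
    if not op.outputs.boolean():
        print(op.outputs.message())  # string output on pin 1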
diff --git a/src/ansys/dpf/core/operators/logic/identical_fields.py b/src/ansys/dpf/core/operators/logic/identical_fields.py
index 18e21849393..f840443f5ca 100644
--- a/src/ansys/dpf/core/operators/logic/identical_fields.py
+++ b/src/ansys/dpf/core/operators/logic/identical_fields.py
@@ -4,40 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_fields(Operator):
- """Check if two fields are identical.
+ r"""Check if two fields are identical.
+
Parameters
----------
- fieldA : Field
- fieldB : Field
- double_value : float, optional
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
- double_tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is (v1
- - v2) / v2 < relativetol. default is
- 0.001.
+ fieldA: Field
+ fieldB: Field
+ double_value: float, optional
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).
+ double_tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1 - v2) / v2 < relativeTol. Default is 0.001.
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -91,8 +86,9 @@ def __init__(
self.inputs.double_tolerance.connect(double_tolerance)
@staticmethod
- def _spec():
- description = """Check if two fields are identical."""
+ def _spec() -> Specification:
+ description = r"""Check if two fields are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -100,35 +96,25 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fieldB",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="double_value",
type_names=["double"],
optional=True,
- document="""Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).""",
+ document=r"""Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).""",
),
3: PinSpecification(
name="double_tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is (v1
- - v2) / v2 < relativetol. default is
- 0.001.""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1 - v2) / v2 < relativeTol. Default is 0.001.""",
),
},
map_output_pin_spec={
@@ -136,20 +122,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -158,29 +144,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="AreFieldsIdentical", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalFields
+ inputs:
+ An instance of InputsIdenticalFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalFields
+ outputs:
+ An instance of OutputsIdenticalFields.
"""
return super().outputs
@@ -215,12 +208,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._double_tolerance)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,12 +227,13 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Parameters
- ----------
- my_fieldB : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,19 +246,15 @@ def fieldB(self):
return self._fieldB
@property
- def double_value(self):
- """Allows to connect double_value input to the operator.
+ def double_value(self) -> Input:
+ r"""Allows to connect double_value input to the operator.
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).
- Parameters
- ----------
- my_double_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -276,19 +267,15 @@ def double_value(self):
return self._double_value
@property
- def double_tolerance(self):
- """Allows to connect double_tolerance input to the operator.
+ def double_tolerance(self) -> Input:
+ r"""Allows to connect double_tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is (v1
- - v2) / v2 < relativetol. default is
- 0.001.
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1 - v2) / v2 < relativeTol. Default is 0.001.
- Parameters
- ----------
- my_double_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -322,35 +309,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
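``identical_fields`` follows the same pattern for two single fields. ``field_a`` and ``field_b`` are assumed ``Field`` instances (illustrative names only):

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.identical_fields()
    op.inputs.fieldA.connect(field_a)          # field_a, field_b assumed to exist
    op.inputs.fieldB.connect(field_b)
    op.inputs.double_tolerance.connect(0.001)  # (v1 - v2) / v2 < 0.001 counts as identical

    if not op.outputs.boolean():
        print(op.outputs.message())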
diff --git a/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py b/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py
index 529768b5953..2e8068bcc77 100644
--- a/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py
+++ b/src/ansys/dpf/core/operators/logic/identical_generic_data_containers.py
@@ -4,50 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_generic_data_containers(Operator):
- """Takes two generic data containers and compares them. Supported types:
+ r"""Takes two generic data containers and compares them. Supported types:
Field, FieldsContainer, Mesh, MeshesContainer, PropertyField,
- PropertyFieldsContainer, Scoping, ScopingsContainer,
- StringFieldstandard types (double, int, string, vector of int,
- doubles, string)Note: all inputs related to fields, mesh, and so
- on are passed to each property check.
+ PropertyFieldsContainer, Scoping, ScopingsContainer, StringField, standard
+ types (double, int, string, vector of int, doubles, string). Note: all
+ inputs related to fields, mesh, and so on are passed to each property
+ check.
+
Parameters
----------
- generic_data_containerA : GenericDataContainer
- generic_data_containerB : GenericDataContainer
- double_value : float
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
- double_tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
- compare_auxiliary : bool
- For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ generic_data_containerA: GenericDataContainer
+ generic_data_containerB: GenericDataContainer
+ double_value: float
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).
+ double_tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
+ compare_auxiliary: bool
+ For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
Returns
-------
- included : bool
- Bool (true if belongs...)
- message : str
+ included: bool
+ bool (true if belongs...)
+ message: str
Examples
--------
@@ -109,14 +101,14 @@ def __init__(
self.inputs.compare_auxiliary.connect(compare_auxiliary)
@staticmethod
- def _spec():
- description = """Takes two generic data containers and compares them. Supported types:
- Field, FieldsContainer, Mesh, MeshesContainer,
- PropertyField, PropertyFieldsContainer, Scoping,
- ScopingsContainer, StringFieldstandard types (double, int,
- string, vector of int, doubles, string)Note: all inputs
- related to fields, mesh, and so on are passed to each
- property check."""
+ def _spec() -> Specification:
+ description = r"""Takes two generic data containers and compares them. Supported types:
+Field, FieldsContainer, Mesh, MeshesContainer, PropertyField,
+PropertyFieldsContainer, Scoping, ScopingsContainer, StringField, standard
+types (double, int, string, vector of int, doubles, string). Note: all
+inputs related to fields, mesh, and so on are passed to each property
+check.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,44 +116,31 @@ def _spec():
name="generic_data_containerA",
type_names=["generic_data_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="generic_data_containerB",
type_names=["generic_data_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="double_value",
type_names=["double"],
optional=False,
- document="""Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).""",
+ document=r"""Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).""",
),
3: PinSpecification(
name="double_tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.""",
),
4: PinSpecification(
name="compare_auxiliary",
type_names=["bool"],
optional=False,
- document="""For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.""",
+ document=r"""For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.""",
),
},
map_output_pin_spec={
@@ -169,20 +148,20 @@ def _spec():
name="included",
type_names=["bool"],
optional=False,
- document="""Bool (true if belongs...)""",
+ document=r"""bool (true if belongs...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -191,31 +170,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="compare::generic_data_container", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalGenericDataContainers:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalGenericDataContainers
+ inputs:
+ An instance of InputsIdenticalGenericDataContainers.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalGenericDataContainers:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalGenericDataContainers
+ outputs:
+ An instance of OutputsIdenticalGenericDataContainers.
"""
return super().outputs
@@ -264,12 +250,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compare_auxiliary)
@property
- def generic_data_containerA(self):
- """Allows to connect generic_data_containerA input to the operator.
+ def generic_data_containerA(self) -> Input:
+ r"""Allows to connect generic_data_containerA input to the operator.
- Parameters
- ----------
- my_generic_data_containerA : GenericDataContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -282,12 +269,13 @@ def generic_data_containerA(self):
return self._generic_data_containerA
@property
- def generic_data_containerB(self):
- """Allows to connect generic_data_containerB input to the operator.
+ def generic_data_containerB(self) -> Input:
+ r"""Allows to connect generic_data_containerB input to the operator.
- Parameters
- ----------
- my_generic_data_containerB : GenericDataContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,19 +288,15 @@ def generic_data_containerB(self):
return self._generic_data_containerB
@property
- def double_value(self):
- """Allows to connect double_value input to the operator.
+ def double_value(self) -> Input:
+ r"""Allows to connect double_value input to the operator.
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).
- Parameters
- ----------
- my_double_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -325,19 +309,15 @@ def double_value(self):
return self._double_value
@property
- def double_tolerance(self):
- """Allows to connect double_tolerance input to the operator.
+ def double_tolerance(self) -> Input:
+ r"""Allows to connect double_tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
- Parameters
- ----------
- my_double_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,17 +330,15 @@ def double_tolerance(self):
return self._double_tolerance
@property
- def compare_auxiliary(self):
- """Allows to connect compare_auxiliary input to the operator.
+ def compare_auxiliary(self) -> Input:
+ r"""Allows to connect compare_auxiliary input to the operator.
- For meshes and meshescontainer: compare
- auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ For meshes and meshescontainer: compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
- Parameters
- ----------
- my_compare_auxiliary : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -398,35 +376,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def included(self):
- """Allows to get included output of the operator
+ def included(self) -> Output:
+ r"""Allows to get included output of the operator
+
+ bool (true if belongs...)
Returns
- ----------
- my_included : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_generic_data_containers()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_included = op.outputs.included()
- """ # noqa: E501
+ """
return self._included
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_generic_data_containers()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
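A sketch for ``identical_generic_data_containers``, this time enabling the auxiliary-data check. ``gdc_a`` and ``gdc_b`` are assumed ``GenericDataContainer`` objects (illustrative names only):

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.identical_generic_data_containers()
    op.inputs.generic_data_containerA.connect(gdc_a)
    op.inputs.generic_data_containerB.connect(gdc_b)
    op.inputs.double_value.connect(1.0e-14)
    op.inputs.compare_auxiliary.connect(True)  # also compare property fields, scopings, ...

    same = op.outputs.included()
    report = op.outputs.message()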
diff --git a/src/ansys/dpf/core/operators/logic/identical_mc.py b/src/ansys/dpf/core/operators/logic/identical_mc.py
index 3fa38b68a77..c1137c78337 100644
--- a/src/ansys/dpf/core/operators/logic/identical_mc.py
+++ b/src/ansys/dpf/core/operators/logic/identical_mc.py
@@ -4,43 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_mc(Operator):
- """Checks if two meshes_container are identical.
+ r"""Checks if two meshes_container are identical.
+
Parameters
----------
- meshes_containerA : MeshesContainer
- meshes_containerB : MeshesContainer
- small_value : float, optional
- Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).
- tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).
- compare_auxiliary : bool
- Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ meshes_containerA: MeshesContainer
+ meshes_containerB: MeshesContainer
+ small_value: float, optional
+ Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).
+ tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).
+ compare_auxiliary: bool
+ compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -100,8 +94,9 @@ def __init__(
self.inputs.compare_auxiliary.connect(compare_auxiliary)
@staticmethod
- def _spec():
- description = """Checks if two meshes_container are identical."""
+ def _spec() -> Specification:
+ description = r"""Checks if two meshes_container are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -109,42 +104,31 @@ def _spec():
name="meshes_containerA",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="meshes_containerB",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="small_value",
type_names=["double"],
optional=True,
- document="""Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).""",
+ document=r"""Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).""",
),
3: PinSpecification(
name="tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).""",
),
4: PinSpecification(
name="compare_auxiliary",
type_names=["bool"],
optional=False,
- document="""Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.""",
+ document=r"""compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.""",
),
},
map_output_pin_spec={
@@ -152,20 +136,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -174,29 +158,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::meshes_container", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalMc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalMc
+ inputs:
+ An instance of InputsIdenticalMc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalMc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalMc
+ outputs:
+ An instance of OutputsIdenticalMc.
"""
return super().outputs
@@ -235,12 +226,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compare_auxiliary)
@property
- def meshes_containerA(self):
- """Allows to connect meshes_containerA input to the operator.
+ def meshes_containerA(self) -> Input:
+ r"""Allows to connect meshes_containerA input to the operator.
- Parameters
- ----------
- my_meshes_containerA : MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -253,12 +245,13 @@ def meshes_containerA(self):
return self._meshes_containerA
@property
- def meshes_containerB(self):
- """Allows to connect meshes_containerB input to the operator.
+ def meshes_containerB(self) -> Input:
+ r"""Allows to connect meshes_containerB input to the operator.
- Parameters
- ----------
- my_meshes_containerB : MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -271,19 +264,15 @@ def meshes_containerB(self):
return self._meshes_containerB
@property
- def small_value(self):
- """Allows to connect small_value input to the operator.
+ def small_value(self) -> Input:
+ r"""Allows to connect small_value input to the operator.
- Double positive small value. smallest value
- which will be considered during the
- comparison step. all the abs(values)
- in the field less than this value are
- considered as null, (default
- value:1.0e-14).
+ Double positive small value. Smallest value which will be considered during the comparison step. All the abs(values) in the field less than this value are considered as null, (default value:1.0e-14).
- Parameters
- ----------
- my_small_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,18 +285,15 @@ def small_value(self):
return self._small_value
@property
- def tolerance(self):
- """Allows to connect tolerance input to the operator.
+ def tolerance(self) -> Input:
+ r"""Allows to connect tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical (v1-v2)/v2 <
- relativetol (default is 0.001).
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical (v1-v2)/v2 < relativeTol (default is 0.001).
- Parameters
- ----------
- my_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -320,16 +306,15 @@ def tolerance(self):
return self._tolerance
@property
- def compare_auxiliary(self):
- """Allows to connect compare_auxiliary input to the operator.
+ def compare_auxiliary(self) -> Input:
+ r"""Allows to connect compare_auxiliary input to the operator.
- Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
- Parameters
- ----------
- my_compare_auxiliary : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -363,35 +348,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_mc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_mc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
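``identical_mc`` compares two meshes containers with the same tolerance pins. ``mc_a`` and ``mc_b`` are assumed ``MeshesContainer`` objects (illustrative names only):

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.identical_mc(
        meshes_containerA=mc_a,
        meshes_containerB=mc_b,
        compare_auxiliary=False,  # skip property fields and scopings
    )
    same = op.outputs.boolean()
    report = op.outputs.message()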
diff --git a/src/ansys/dpf/core/operators/logic/identical_meshes.py b/src/ansys/dpf/core/operators/logic/identical_meshes.py
index 246808c5934..a8d3349af82 100644
--- a/src/ansys/dpf/core/operators/logic/identical_meshes.py
+++ b/src/ansys/dpf/core/operators/logic/identical_meshes.py
@@ -4,37 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_meshes(Operator):
- """Takes two meshes and compares them. Note: When comparing mesh
- properties, the current behaviour is to verify that the properties
- in the first mesh (pin 0) are included in the second mesh (pin 1).
+ r"""Takes two meshes and compares them. Note: When comparing mesh
+ properties, the current behaviour is to verify that the properties in
+ the first mesh (pin 0) are included in the second mesh (pin 1).
+
Parameters
----------
- meshA : MeshedRegion
- meshB : MeshedRegion
- small_value : float, optional
- Define what is a small value for numeric
- comparison (default value:1.0e-14).
- tolerance : float, optional
- Define the relative tolerance ceil for
- numeric comparison (default is
- 0.001).
- compare_auxiliary : bool
- Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ meshA: MeshedRegion
+ meshB: MeshedRegion
+ small_value: float, optional
+ define what is a small value for numeric comparison (default value:1.0e-14).
+ tolerance: float, optional
+ define the relative tolerance ceil for numeric comparison (default is 0.001).
+ compare_auxiliary: bool
+ compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
Returns
-------
- are_identical : bool
+ are_identical: bool
Examples
--------
@@ -93,11 +93,11 @@ def __init__(
self.inputs.compare_auxiliary.connect(compare_auxiliary)
@staticmethod
- def _spec():
- description = """Takes two meshes and compares them. Note: When comparing mesh
- properties, the current behaviour is to verify that the
- properties in the first mesh (pin 0) are included in the
- second mesh (pin 1)."""
+ def _spec() -> Specification:
+ description = r"""Takes two meshes and compares them. Note: When comparing mesh
+properties, the current behaviour is to verify that the properties in
+the first mesh (pin 0) are included in the second mesh (pin 1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -105,36 +105,31 @@ def _spec():
name="meshA",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="meshB",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="small_value",
type_names=["double"],
optional=True,
- document="""Define what is a small value for numeric
- comparison (default value:1.0e-14).""",
+ document=r"""define what is a small value for numeric comparison (default value:1.0e-14).""",
),
3: PinSpecification(
name="tolerance",
type_names=["double"],
optional=True,
- document="""Define the relative tolerance ceil for
- numeric comparison (default is
- 0.001).""",
+ document=r"""define the relative tolerance ceil for numeric comparison (default is 0.001).""",
),
4: PinSpecification(
name="compare_auxiliary",
type_names=["bool"],
optional=False,
- document="""Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.""",
+ document=r"""compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.""",
),
},
map_output_pin_spec={
@@ -142,14 +137,14 @@ def _spec():
name="are_identical",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -158,29 +153,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::mesh", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalMeshes:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalMeshes
+ inputs:
+ An instance of InputsIdenticalMeshes.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalMeshes:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalMeshes
+ outputs:
+ An instance of OutputsIdenticalMeshes.
"""
return super().outputs
@@ -221,12 +223,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compare_auxiliary)
@property
- def meshA(self):
- """Allows to connect meshA input to the operator.
+ def meshA(self) -> Input:
+ r"""Allows to connect meshA input to the operator.
- Parameters
- ----------
- my_meshA : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -239,12 +242,13 @@ def meshA(self):
return self._meshA
@property
- def meshB(self):
- """Allows to connect meshB input to the operator.
+ def meshB(self) -> Input:
+ r"""Allows to connect meshB input to the operator.
- Parameters
- ----------
- my_meshB : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -257,15 +261,15 @@ def meshB(self):
return self._meshB
@property
- def small_value(self):
- """Allows to connect small_value input to the operator.
+ def small_value(self) -> Input:
+ r"""Allows to connect small_value input to the operator.
- Define what is a small value for numeric
- comparison (default value:1.0e-14).
+ define what is a small value for numeric comparison (default value:1.0e-14).
- Parameters
- ----------
- my_small_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -278,16 +282,15 @@ def small_value(self):
return self._small_value
@property
- def tolerance(self):
- """Allows to connect tolerance input to the operator.
+ def tolerance(self) -> Input:
+ r"""Allows to connect tolerance input to the operator.
- Define the relative tolerance ceil for
- numeric comparison (default is
- 0.001).
+ define the relative tolerance ceil for numeric comparison (default is 0.001).
- Parameters
- ----------
- my_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,16 +303,15 @@ def tolerance(self):
return self._tolerance
@property
- def compare_auxiliary(self):
- """Allows to connect compare_auxiliary input to the operator.
+ def compare_auxiliary(self) -> Input:
+ r"""Allows to connect compare_auxiliary input to the operator.
- Compare auxiliary data (i.e property fields,
- scopings...). default value is
- 'false'.
+ compare auxiliary data (i.e property fields, scopings...). Default value is 'false'.
- Parameters
- ----------
- my_compare_auxiliary : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,18 +342,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._are_identical)
@property
- def are_identical(self):
- """Allows to get are_identical output of the operator
+ def are_identical(self) -> Output:
+ r"""Allows to get are_identical output of the operator
Returns
- ----------
- my_are_identical : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_meshes()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_are_identical = op.outputs.are_identical()
- """ # noqa: E501
+ """
return self._are_identical
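Finally, ``identical_meshes`` exposes a single ``are_identical`` output. As noted in the description above, property comparison only verifies that the properties of ``meshA`` (pin 0) are included in ``meshB`` (pin 1). ``mesh_a`` and ``mesh_b`` are assumed ``MeshedRegion`` objects (illustrative names only):

    from ansys.dpf import core as dpf

    op = dpf.operators.logic.identical_meshes()
    op.inputs.meshA.connect(mesh_a)
    op.inputs.meshB.connect(mesh_b)
    op.inputs.compare_auxiliary.connect(True)  # also compare property fields and scopings

    print(op.outputs.are_identical())  # True or False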
diff --git a/src/ansys/dpf/core/operators/logic/identical_pfc.py b/src/ansys/dpf/core/operators/logic/identical_pfc.py
index 771aa0da887..49d70c9e0dc 100644
--- a/src/ansys/dpf/core/operators/logic/identical_pfc.py
+++ b/src/ansys/dpf/core/operators/logic/identical_pfc.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_pfc(Operator):
- """Checks if two property_fields_container are identical.
+ r"""Checks if two property_fields_container are identical.
+
Parameters
----------
- property_fields_containerA : PropertyFieldsContainer
- property_fields_containerB : PropertyFieldsContainer
+ property_fields_containerA: PropertyFieldsContainer
+ property_fields_containerB: PropertyFieldsContainer
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -67,8 +72,9 @@ def __init__(
self.inputs.property_fields_containerB.connect(property_fields_containerB)
@staticmethod
- def _spec():
- description = """Checks if two property_fields_container are identical."""
+ def _spec() -> Specification:
+ description = r"""Checks if two property_fields_container are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,13 +82,13 @@ def _spec():
name="property_fields_containerA",
type_names=["property_fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="property_fields_containerB",
type_names=["property_fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -90,20 +96,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,31 +118,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="compare::property_fields_container", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalPfc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalPfc
+ inputs:
+ An instance of InputsIdenticalPfc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalPfc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalPfc
+ outputs:
+ An instance of OutputsIdenticalPfc.
"""
return super().outputs
@@ -167,12 +180,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_fields_containerB)
@property
- def property_fields_containerA(self):
- """Allows to connect property_fields_containerA input to the operator.
+ def property_fields_containerA(self) -> Input:
+ r"""Allows to connect property_fields_containerA input to the operator.
- Parameters
- ----------
- my_property_fields_containerA : PropertyFieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -185,12 +199,13 @@ def property_fields_containerA(self):
return self._property_fields_containerA
@property
- def property_fields_containerB(self):
- """Allows to connect property_fields_containerB input to the operator.
+ def property_fields_containerB(self) -> Input:
+ r"""Allows to connect property_fields_containerB input to the operator.
- Parameters
- ----------
- my_property_fields_containerB : PropertyFieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,35 +239,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_pfc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_pfc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
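A minimal usage sketch for this operator, assuming two PropertyFieldsContainer instances are already available (my_pfcA and my_pfcB are hypothetical placeholders):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_pfc()
>>> # my_pfcA and my_pfcB stand in for two existing PropertyFieldsContainer instances
>>> op.inputs.property_fields_containerA.connect(my_pfcA)
>>> op.inputs.property_fields_containerB.connect(my_pfcB)
>>> identical = op.outputs.boolean()
>>> message = op.outputs.message()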
diff --git a/src/ansys/dpf/core/operators/logic/identical_property_fields.py b/src/ansys/dpf/core/operators/logic/identical_property_fields.py
index 0bfcbb4feac..ada5354bb86 100644
--- a/src/ansys/dpf/core/operators/logic/identical_property_fields.py
+++ b/src/ansys/dpf/core/operators/logic/identical_property_fields.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_property_fields(Operator):
- """Takes two property fields and compares them.
+ r"""Takes two property fields and compares them.
+
Parameters
----------
- property_fieldA : MeshedRegion
- property_fieldB : MeshedRegion
+ property_fieldA: MeshedRegion
+ property_fieldB: MeshedRegion
Returns
-------
- are_identical : bool
- information : str
+ are_identical: bool
+ information: str
Examples
--------
@@ -60,8 +65,9 @@ def __init__(
self.inputs.property_fieldB.connect(property_fieldB)
@staticmethod
- def _spec():
- description = """Takes two property fields and compares them."""
+ def _spec() -> Specification:
+ description = r"""Takes two property fields and compares them.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="property_fieldA",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="property_fieldB",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,20 +89,20 @@ def _spec():
name="are_identical",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="information",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -105,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::property_field", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalPropertyFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalPropertyFields
+ inputs:
+ An instance of InputsIdenticalPropertyFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalPropertyFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalPropertyFields
+ outputs:
+ An instance of OutputsIdenticalPropertyFields.
"""
return super().outputs
@@ -158,12 +171,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_fieldB)
@property
- def property_fieldA(self):
- """Allows to connect property_fieldA input to the operator.
+ def property_fieldA(self) -> Input:
+ r"""Allows to connect property_fieldA input to the operator.
- Parameters
- ----------
- my_property_fieldA : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,12 +190,13 @@ def property_fieldA(self):
return self._property_fieldA
@property
- def property_fieldB(self):
- """Allows to connect property_fieldB input to the operator.
+ def property_fieldB(self) -> Input:
+ r"""Allows to connect property_fieldB input to the operator.
- Parameters
- ----------
- my_property_fieldB : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -219,35 +234,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._information)
@property
- def are_identical(self):
- """Allows to get are_identical output of the operator
+ def are_identical(self) -> Output:
+ r"""Allows to get are_identical output of the operator
Returns
- ----------
- my_are_identical : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_property_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_are_identical = op.outputs.are_identical()
- """ # noqa: E501
+ """
return self._are_identical
@property
- def information(self):
- """Allows to get information output of the operator
+ def information(self) -> Output:
+ r"""Allows to get information output of the operator
Returns
- ----------
- my_information : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_property_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_information = op.outputs.information()
- """ # noqa: E501
+ """
return self._information
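A minimal usage sketch, assuming two meshed regions my_meshA and my_meshB (hypothetical placeholders) are already loaded:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_property_fields()
>>> # my_meshA and my_meshB stand in for two existing MeshedRegion instances
>>> op.inputs.property_fieldA.connect(my_meshA)
>>> op.inputs.property_fieldB.connect(my_meshB)
>>> are_identical = op.outputs.are_identical()
>>> information = op.outputs.information()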
diff --git a/src/ansys/dpf/core/operators/logic/identical_sc.py b/src/ansys/dpf/core/operators/logic/identical_sc.py
index fcdbd599a30..c70d06ff97d 100644
--- a/src/ansys/dpf/core/operators/logic/identical_sc.py
+++ b/src/ansys/dpf/core/operators/logic/identical_sc.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_sc(Operator):
- """Checks if two scopings_container are identical.
+ r"""Checks if two scopings_container are identical.
+
Parameters
----------
- scopings_containerA : ScopingsContainer
- scopings_containerB : ScopingsContainer
+ scopings_containerA: ScopingsContainer
+ scopings_containerB: ScopingsContainer
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -67,8 +72,9 @@ def __init__(
self.inputs.scopings_containerB.connect(scopings_containerB)
@staticmethod
- def _spec():
- description = """Checks if two scopings_container are identical."""
+ def _spec() -> Specification:
+ description = r"""Checks if two scopings_container are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,13 +82,13 @@ def _spec():
name="scopings_containerA",
type_names=["scopings_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scopings_containerB",
type_names=["scopings_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -90,20 +96,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,31 +118,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="compare::scopings_container", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalSc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalSc
+ inputs:
+ An instance of InputsIdenticalSc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalSc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalSc
+ outputs:
+ An instance of OutputsIdenticalSc.
"""
return super().outputs
@@ -163,12 +176,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scopings_containerB)
@property
- def scopings_containerA(self):
- """Allows to connect scopings_containerA input to the operator.
+ def scopings_containerA(self) -> Input:
+ r"""Allows to connect scopings_containerA input to the operator.
- Parameters
- ----------
- my_scopings_containerA : ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -181,12 +195,13 @@ def scopings_containerA(self):
return self._scopings_containerA
@property
- def scopings_containerB(self):
- """Allows to connect scopings_containerB input to the operator.
+ def scopings_containerB(self) -> Input:
+ r"""Allows to connect scopings_containerB input to the operator.
- Parameters
- ----------
- my_scopings_containerB : ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -220,35 +235,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_sc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_sc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
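A minimal usage sketch, assuming two ScopingsContainer instances my_scA and my_scB (hypothetical placeholders):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_sc()
>>> # my_scA and my_scB stand in for two existing ScopingsContainer instances
>>> op.inputs.scopings_containerA.connect(my_scA)
>>> op.inputs.scopings_containerB.connect(my_scB)
>>> identical = op.outputs.boolean()
>>> message = op.outputs.message()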
diff --git a/src/ansys/dpf/core/operators/logic/identical_scopings.py b/src/ansys/dpf/core/operators/logic/identical_scopings.py
index 368e27b765a..ec68852452b 100644
--- a/src/ansys/dpf/core/operators/logic/identical_scopings.py
+++ b/src/ansys/dpf/core/operators/logic/identical_scopings.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_scopings(Operator):
- """Check if two scopings are identical.
+ r"""Check if two scopings are identical.
+
Parameters
----------
- scopingA : Scoping
- scopingB : Scoping
+ scopingA: Scoping
+ scopingB: Scoping
Returns
-------
- boolean : bool
- Bool (true if identical...)
- message : str
+ boolean: bool
+ bool (true if identical...)
+ message: str
Examples
--------
@@ -59,8 +64,9 @@ def __init__(self, scopingA=None, scopingB=None, config=None, server=None):
self.inputs.scopingB.connect(scopingB)
@staticmethod
- def _spec():
- description = """Check if two scopings are identical."""
+ def _spec() -> Specification:
+ description = r"""Check if two scopings are identical.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -68,13 +74,13 @@ def _spec():
name="scopingA",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scopingB",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -82,20 +88,20 @@ def _spec():
name="boolean",
type_names=["bool"],
optional=False,
- document="""Bool (true if identical...)""",
+ document=r"""bool (true if identical...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -104,29 +110,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::scoping", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalScopings:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalScopings
+ inputs:
+ An instance of InputsIdenticalScopings.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalScopings:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalScopings
+ outputs:
+ An instance of OutputsIdenticalScopings.
"""
return super().outputs
@@ -153,12 +166,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scopingB)
@property
- def scopingA(self):
- """Allows to connect scopingA input to the operator.
+ def scopingA(self) -> Input:
+ r"""Allows to connect scopingA input to the operator.
- Parameters
- ----------
- my_scopingA : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,12 +185,13 @@ def scopingA(self):
return self._scopingA
@property
- def scopingB(self):
- """Allows to connect scopingB input to the operator.
+ def scopingB(self) -> Input:
+ r"""Allows to connect scopingB input to the operator.
- Parameters
- ----------
- my_scopingB : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,35 +225,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def boolean(self):
- """Allows to get boolean output of the operator
+ def boolean(self) -> Output:
+ r"""Allows to get boolean output of the operator
+
+ bool (true if identical...)
Returns
- ----------
- my_boolean : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_scopings()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_boolean = op.outputs.boolean()
- """ # noqa: E501
+ """
return self._boolean
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_scopings()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
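A minimal usage sketch, assuming two Scoping instances my_scopingA and my_scopingB (hypothetical placeholders):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_scopings()
>>> # my_scopingA and my_scopingB stand in for two existing Scoping instances
>>> op.inputs.scopingA.connect(my_scopingA)
>>> op.inputs.scopingB.connect(my_scopingB)
>>> identical = op.outputs.boolean()
>>> message = op.outputs.message()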
diff --git a/src/ansys/dpf/core/operators/logic/identical_string_fields.py b/src/ansys/dpf/core/operators/logic/identical_string_fields.py
index 8866603e28a..097d7018e1b 100644
--- a/src/ansys/dpf/core/operators/logic/identical_string_fields.py
+++ b/src/ansys/dpf/core/operators/logic/identical_string_fields.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class identical_string_fields(Operator):
- """Takes two string fields and compares them.
+ r"""Takes two string fields and compares them.
+
Parameters
----------
- string_fieldA : StringField
- string_fieldB : StringField
+ string_fieldA: StringField
+ string_fieldB: StringField
Returns
-------
- are_identical : bool
- information : str
+ are_identical: bool
+ information: str
Examples
--------
@@ -60,8 +65,9 @@ def __init__(
self.inputs.string_fieldB.connect(string_fieldB)
@staticmethod
- def _spec():
- description = """Takes two string fields and compares them."""
+ def _spec() -> Specification:
+ description = r"""Takes two string fields and compares them.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="string_fieldA",
type_names=["string_field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="string_fieldB",
type_names=["string_field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,20 +89,20 @@ def _spec():
name="are_identical",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="information",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -105,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compare::string_field", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIdenticalStringFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIdenticalStringFields
+ inputs:
+ An instance of InputsIdenticalStringFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIdenticalStringFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIdenticalStringFields
+ outputs:
+ An instance of OutputsIdenticalStringFields.
"""
return super().outputs
@@ -158,12 +171,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._string_fieldB)
@property
- def string_fieldA(self):
- """Allows to connect string_fieldA input to the operator.
+ def string_fieldA(self) -> Input:
+ r"""Allows to connect string_fieldA input to the operator.
- Parameters
- ----------
- my_string_fieldA : StringField
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,12 +190,13 @@ def string_fieldA(self):
return self._string_fieldA
@property
- def string_fieldB(self):
- """Allows to connect string_fieldB input to the operator.
+ def string_fieldB(self) -> Input:
+ r"""Allows to connect string_fieldB input to the operator.
- Parameters
- ----------
- my_string_fieldB : StringField
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,35 +232,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._information)
@property
- def are_identical(self):
- """Allows to get are_identical output of the operator
+ def are_identical(self) -> Output:
+ r"""Allows to get are_identical output of the operator
Returns
- ----------
- my_are_identical : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_string_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_are_identical = op.outputs.are_identical()
- """ # noqa: E501
+ """
return self._are_identical
@property
- def information(self):
- """Allows to get information output of the operator
+ def information(self) -> Output:
+ r"""Allows to get information output of the operator
Returns
- ----------
- my_information : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_string_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_information = op.outputs.information()
- """ # noqa: E501
+ """
return self._information
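A minimal usage sketch, assuming two StringField instances my_sfA and my_sfB (hypothetical placeholders):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.identical_string_fields()
>>> # my_sfA and my_sfB stand in for two existing StringField instances
>>> op.inputs.string_fieldA.connect(my_sfA)
>>> op.inputs.string_fieldB.connect(my_sfB)
>>> are_identical = op.outputs.are_identical()
>>> information = op.outputs.information()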
diff --git a/src/ansys/dpf/core/operators/logic/included_fields.py b/src/ansys/dpf/core/operators/logic/included_fields.py
index 1e1fec522f6..76167bc235b 100644
--- a/src/ansys/dpf/core/operators/logic/included_fields.py
+++ b/src/ansys/dpf/core/operators/logic/included_fields.py
@@ -4,40 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class included_fields(Operator):
- """Checks if one field belongs to another.
+ r"""Checks if one field belongs to another.
+
Parameters
----------
- fieldA : Field
- fieldB : Field
- double_value : float
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
- double_tolerance : float, optional
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
+ fieldA: Field
+ fieldB: Field
+ double_value: float
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null (default value: 1.0e-14).
+ double_tolerance: float, optional
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
Returns
-------
- included : bool
- Bool (true if belongs...)
- message : str
+ included: bool
+ bool (true if belongs...)
+ message: str
Examples
--------
@@ -91,8 +86,9 @@ def __init__(
self.inputs.double_tolerance.connect(double_tolerance)
@staticmethod
- def _spec():
- description = """Checks if one field belongs to another."""
+ def _spec() -> Specification:
+ description = r"""Checks if one field belongs to another.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -100,35 +96,25 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fieldB",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="double_value",
type_names=["double"],
optional=False,
- document="""Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).""",
+ document=r"""Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null, (default value: 1.0e-14).""",
),
3: PinSpecification(
name="double_tolerance",
type_names=["double"],
optional=True,
- document="""Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.""",
+ document=r"""Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.""",
),
},
map_output_pin_spec={
@@ -136,20 +122,20 @@ def _spec():
name="included",
type_names=["bool"],
optional=False,
- document="""Bool (true if belongs...)""",
+ document=r"""bool (true if belongs...)""",
),
1: PinSpecification(
name="message",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -158,29 +144,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="Are_fields_included", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIncludedFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIncludedFields
+ inputs:
+ An instance of InputsIncludedFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIncludedFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIncludedFields
+ outputs:
+ An instance of OutputsIncludedFields.
"""
return super().outputs
@@ -215,12 +208,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._double_tolerance)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,12 +227,13 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Parameters
- ----------
- my_fieldB : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,19 +246,15 @@ def fieldB(self):
return self._fieldB
@property
- def double_value(self):
- """Allows to connect double_value input to the operator.
+ def double_value(self) -> Input:
+ r"""Allows to connect double_value input to the operator.
- Double positive small value. smallest value
- considered during the comparison
- step. all the absolute values in the
- field less than this value are
- considered null, (default value:
- 1.0e-14).
+ Double positive small value. Smallest value considered during the comparison step. All the absolute values in the field less than this value are considered null (default value: 1.0e-14).
- Parameters
- ----------
- my_double_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -276,19 +267,15 @@ def double_value(self):
return self._double_value
@property
- def double_tolerance(self):
- """Allows to connect double_tolerance input to the operator.
+ def double_tolerance(self) -> Input:
+ r"""Allows to connect double_tolerance input to the operator.
- Double relative tolerance. maximum tolerance
- gap between two compared values.
- values within relative tolerance are
- considered identical. formula is
- (v1-v2)/v2 < relativetol. default is
- 0.001.
+ Double relative tolerance. Maximum tolerance gap between two compared values. Values within relative tolerance are considered identical. Formula is (v1-v2)/v2 < relativeTol. Default is 0.001.
- Parameters
- ----------
- my_double_tolerance : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -322,35 +309,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._message)
@property
- def included(self):
- """Allows to get included output of the operator
+ def included(self) -> Output:
+ r"""Allows to get included output of the operator
+
+ bool (true if belongs...)
Returns
- ----------
- my_included : bool
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.included_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_included = op.outputs.included()
- """ # noqa: E501
+ """
return self._included
@property
- def message(self):
- """Allows to get message output of the operator
+ def message(self) -> Output:
+ r"""Allows to get message output of the operator
Returns
- ----------
- my_message : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.included_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_message = op.outputs.message()
- """ # noqa: E501
+ """
return self._message
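A minimal usage sketch, assuming two Field instances my_fieldA and my_fieldB (hypothetical placeholders); the optional pins are connected with their documented defaults:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.included_fields()
>>> # my_fieldA and my_fieldB stand in for two existing Field instances
>>> op.inputs.fieldA.connect(my_fieldA)
>>> op.inputs.fieldB.connect(my_fieldB)
>>> # Optional comparison settings, shown here with their documented defaults
>>> op.inputs.double_value.connect(1.0e-14)
>>> op.inputs.double_tolerance.connect(0.001)
>>> included = op.outputs.included()
>>> message = op.outputs.message()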
diff --git a/src/ansys/dpf/core/operators/logic/solid_shell_fields.py b/src/ansys/dpf/core/operators/logic/solid_shell_fields.py
index 4c3b40a6505..73d185a8f46 100644
--- a/src/ansys/dpf/core/operators/logic/solid_shell_fields.py
+++ b/src/ansys/dpf/core/operators/logic/solid_shell_fields.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class solid_shell_fields(Operator):
- """Merges shell and solid fields for each time step/frequency in the
- fields container.
+ r"""Merges shell and solid fields for each time step/frequency in the fields
+ container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Merges shell and solid fields for each time step/frequency in the
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Merges shell and solid fields for each time step/frequency in the fields
+container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="merge::solid_shell_fields", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSolidShellFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSolidShellFields
+ inputs:
+ An instance of InputsSolidShellFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSolidShellFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSolidShellFields
+ outputs:
+ An instance of OutputsSolidShellFields.
"""
return super().outputs
@@ -132,12 +145,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -168,18 +182,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.solid_shell_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
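A minimal usage sketch, assuming a FieldsContainer my_fields_container (hypothetical placeholder) that holds both shell and solid fields:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.solid_shell_fields()
>>> # my_fields_container stands in for an existing FieldsContainer with shell and solid fields
>>> op.inputs.fields_container.connect(my_fields_container)
>>> merged = op.outputs.fields_container()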
diff --git a/src/ansys/dpf/core/operators/logic/split_data_sources.py b/src/ansys/dpf/core/operators/logic/split_data_sources.py
index 847c575b5ab..4bf7ba21079 100644
--- a/src/ansys/dpf/core/operators/logic/split_data_sources.py
+++ b/src/ansys/dpf/core/operators/logic/split_data_sources.py
@@ -4,32 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class split_data_sources(Operator):
- """Splits a Data Sources into multiple coherent data sources, actual
- number of outputs is always less or equal to the given desired
- number of ouputs.
+ r"""Splits a Data Sources into multiple coherent data sources, actual number
+ of outputs is always less or equal to the given desired number of
+ ouputs.
+
Parameters
----------
- data_sources : DataSources
+ data_sources: DataSources
Data sources to split.
- output_count : int
+ output_count: int
Number of desired outputs.
Returns
-------
- output_count : int
+ output_count: int
Actual number of outputs.
- outputs1 : DataSources
+ outputs1: DataSources
Data sources outputs.
- outputs2 : DataSources
+ outputs2: DataSources
Data sources outputs.
Examples
@@ -67,10 +72,11 @@ def __init__(self, data_sources=None, output_count=None, config=None, server=Non
self.inputs.output_count.connect(output_count)
@staticmethod
- def _spec():
- description = """Splits a Data Sources into multiple coherent data sources, actual
- number of outputs is always less or equal to the given
- desired number of ouputs."""
+ def _spec() -> Specification:
+ description = r"""Splits a Data Sources into multiple coherent data sources, actual number
+of outputs is always less or equal to the given desired number of
+ouputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,13 +84,13 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources to split.""",
+ document=r"""Data sources to split.""",
),
1: PinSpecification(
name="output_count",
type_names=["int32"],
optional=False,
- document="""Number of desired outputs.""",
+ document=r"""Number of desired outputs.""",
),
},
map_output_pin_spec={
@@ -92,26 +98,26 @@ def _spec():
name="output_count",
type_names=["int32"],
optional=False,
- document="""Actual number of outputs.""",
+ document=r"""Actual number of outputs.""",
),
0: PinSpecification(
name="outputs1",
type_names=["data_sources"],
optional=False,
- document="""Data sources outputs.""",
+ document=r"""Data sources outputs.""",
),
1: PinSpecification(
name="outputs2",
type_names=["data_sources"],
optional=False,
- document="""Data sources outputs.""",
+ document=r"""Data sources outputs.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +126,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="splitter::data_sources", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSplitDataSources:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSplitDataSources
+ inputs:
+ An instance of InputsSplitDataSources.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSplitDataSources:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSplitDataSources
+ outputs:
+ An instance of OutputsSplitDataSources.
"""
return super().outputs
@@ -169,14 +182,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._output_count)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
Data sources to split.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,14 +203,15 @@ def data_sources(self):
return self._data_sources
@property
- def output_count(self):
- """Allows to connect output_count input to the operator.
+ def output_count(self) -> Input:
+ r"""Allows to connect output_count input to the operator.
Number of desired outputs.
- Parameters
- ----------
- my_output_count : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,52 +248,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._outputs2)
@property
- def output_count(self):
- """Allows to get output_count output of the operator
+ def output_count(self) -> Output:
+ r"""Allows to get output_count output of the operator
+
+ Actual number of outputs.
Returns
- ----------
- my_output_count : int
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_data_sources()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_output_count = op.outputs.output_count()
- """ # noqa: E501
+ """
return self._output_count
@property
- def outputs1(self):
- """Allows to get outputs1 output of the operator
+ def outputs1(self) -> Output:
+ r"""Allows to get outputs1 output of the operator
+
+ Data sources outputs.
Returns
- ----------
- my_outputs1 : DataSources
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_data_sources()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_outputs1 = op.outputs.outputs1()
- """ # noqa: E501
+ """
return self._outputs1
@property
- def outputs2(self):
- """Allows to get outputs2 output of the operator
+ def outputs2(self) -> Output:
+ r"""Allows to get outputs2 output of the operator
+
+ Data sources outputs.
Returns
- ----------
- my_outputs2 : DataSources
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_data_sources()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_outputs2 = op.outputs.outputs2()
- """ # noqa: E501
+ """
return self._outputs2
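A minimal usage sketch, assuming a DataSources instance my_data_sources (hypothetical placeholder) and a desired split into two outputs:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_data_sources()
>>> # my_data_sources stands in for an existing DataSources instance to split
>>> op.inputs.data_sources.connect(my_data_sources)
>>> op.inputs.output_count.connect(2)
>>> actual_count = op.outputs.output_count()
>>> part1 = op.outputs.outputs1()
>>> part2 = op.outputs.outputs2()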
diff --git a/src/ansys/dpf/core/operators/logic/split_streams.py b/src/ansys/dpf/core/operators/logic/split_streams.py
index 5a5a4504201..1937ab907e2 100644
--- a/src/ansys/dpf/core/operators/logic/split_streams.py
+++ b/src/ansys/dpf/core/operators/logic/split_streams.py
@@ -4,32 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class split_streams(Operator):
- """Splits a Streams into multiple coherent streams, actual number of
- outputs is always less or equal to the given desired number of
- ouputs.
+ r"""Splits a Streams into multiple coherent streams, actual number of
+ outputs is always less or equal to the given desired number of ouputs.
+
Parameters
----------
- streams : StreamsContainer
+ streams: StreamsContainer
Streams to split.
- output_count : int
+ output_count: int
Number of desired outputs.
Returns
-------
- output_count : int
+ output_count: int
Actual number of outputs.
- outputs1 : StreamsContainer
+ outputs1: StreamsContainer
Streams outputs.
- outputs2 : StreamsContainer
+ outputs2: StreamsContainer
Streams outputs.
Examples
@@ -67,10 +71,10 @@ def __init__(self, streams=None, output_count=None, config=None, server=None):
self.inputs.output_count.connect(output_count)
@staticmethod
- def _spec():
- description = """Splits a Streams into multiple coherent streams, actual number of
- outputs is always less or equal to the given desired
- number of ouputs."""
+ def _spec() -> Specification:
+ description = r"""Splits a Streams into multiple coherent streams, actual number of
+outputs is always less or equal to the given desired number of ouputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,13 +82,13 @@ def _spec():
name="streams",
type_names=["streams_container"],
optional=False,
- document="""Streams to split.""",
+ document=r"""Streams to split.""",
),
1: PinSpecification(
name="output_count",
type_names=["int32"],
optional=False,
- document="""Number of desired outputs.""",
+ document=r"""Number of desired outputs.""",
),
},
map_output_pin_spec={
@@ -92,26 +96,26 @@ def _spec():
name="output_count",
type_names=["int32"],
optional=False,
- document="""Actual number of outputs.""",
+ document=r"""Actual number of outputs.""",
),
0: PinSpecification(
name="outputs1",
type_names=["streams_container"],
optional=False,
- document="""Streams outputs.""",
+ document=r"""Streams outputs.""",
),
1: PinSpecification(
name="outputs2",
type_names=["streams_container"],
optional=False,
- document="""Streams outputs.""",
+ document=r"""Streams outputs.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="splitter::streams", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSplitStreams:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSplitStreams
+ inputs:
+ An instance of InputsSplitStreams.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSplitStreams:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSplitStreams
+ outputs:
+ An instance of OutputsSplitStreams.
"""
return super().outputs
@@ -169,14 +180,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._output_count)
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
Streams to split.
- Parameters
- ----------
- my_streams : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -189,14 +201,15 @@ def streams(self):
return self._streams
@property
- def output_count(self):
- """Allows to connect output_count input to the operator.
+ def output_count(self) -> Input:
+ r"""Allows to connect output_count input to the operator.
Number of desired outputs.
- Parameters
- ----------
- my_output_count : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,52 +246,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._outputs2)
@property
- def output_count(self):
- """Allows to get output_count output of the operator
+ def output_count(self) -> Output:
+ r"""Allows to get output_count output of the operator
+
+ Actual number of outputs.
Returns
- ----------
- my_output_count : int
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_streams()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_output_count = op.outputs.output_count()
- """ # noqa: E501
+ """
return self._output_count
@property
- def outputs1(self):
- """Allows to get outputs1 output of the operator
+ def outputs1(self) -> Output:
+ r"""Allows to get outputs1 output of the operator
+
+ Streams outputs.
Returns
- ----------
- my_outputs1 : StreamsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_streams()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_outputs1 = op.outputs.outputs1()
- """ # noqa: E501
+ """
return self._outputs1
@property
- def outputs2(self):
- """Allows to get outputs2 output of the operator
+ def outputs2(self) -> Output:
+ r"""Allows to get outputs2 output of the operator
+
+ Streams outputs.
Returns
- ----------
- my_outputs2 : StreamsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_streams()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_outputs2 = op.outputs.outputs2()
- """ # noqa: E501
+ """
return self._outputs2
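A minimal usage sketch, assuming a StreamsContainer my_streams (hypothetical placeholder) and a desired split into two outputs:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.logic.split_streams()
>>> # my_streams stands in for an existing StreamsContainer to split
>>> op.inputs.streams.connect(my_streams)
>>> op.inputs.output_count.connect(2)
>>> actual_count = op.outputs.output_count()
>>> part1 = op.outputs.outputs1()
>>> part2 = op.outputs.outputs2()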
diff --git a/src/ansys/dpf/core/operators/mapping/fft.py b/src/ansys/dpf/core/operators/mapping/fft.py
index b4b74336e6b..45abccf95e7 100644
--- a/src/ansys/dpf/core/operators/mapping/fft.py
+++ b/src/ansys/dpf/core/operators/mapping/fft.py
@@ -4,53 +4,48 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class fft(Operator):
- """Computes the Fast Fourier Transform on each component of input Field
- or each field of input Fields Container (you can use
- transpose_fields_container to have relevant scoping). Fields are
- assumed with the same scoping, number of components and
- representing equally spaced data, ideally resampled to have a 2^n
- points (prepare_sampling_fft with time_freq_interpolation can help
- creating these fields). If Complex label is present, Complex to
- Complex FFT performed otherwise Real to Complex is performed (only
- half of the coefficient will be returned).
+ r"""Computes the Fast Fourier Transform on each component of input Field or
+ each field of input Fields Container (you can use
+ transpose_fields_container to have relevant scoping). Fields are assumed
+ with the same scoping, number of components and representing equally
+ spaced data, ideally resampled to have a 2^n points
+ (prepare_sampling_fft with time_freq_interpolation can help creating
+ these fields). If Complex label is present, Complex to Complex FFT
+ performed otherwise Real to Complex is performed (only half of the
+ coefficient will be returned).
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container.
- scale_forward_transform : float, optional
- Scale for forward transform, default is
- 2/field_num_elementary_data.
- inplace : bool, optional
+ field: Field or FieldsContainer
+ Field or Fields Container.
+ scale_forward_transform: float, optional
+ Scale for Forward Transform, default is 2/field_num_elementary_data.
+ inplace: bool, optional
True if inplace, default is false.
- force_fft_points : int, optional
- Explicitely define number of fft points to
- either rescope or perform zero
- padding.
- cutoff_frequency : float, optional
- Restrict output frequency up to this cutoff
- frequency
- scale_right_amplitude : bool, optional
- If set to true (default is false),
- 2/field_num_entities scaling will be
- applied, to have right amplitude
- values.
+ force_fft_points: int, optional
+ Explicitly define the number of FFT points to either rescope or perform zero padding.
+ cutoff_frequency: float, optional
+ Restrict output frequency up to this cutoff frequency
+ scale_right_amplitude: bool, optional
+ If set to true (default is false), 2/field_num_entities scaling will be applied, to have right amplitude values.
Returns
-------
- fields_container : FieldsContainer
- Output complex fields container with labels
- matching input fields container. no
- supports binded, but
- prepare_sampling_fft provides it.
+ fields_container: FieldsContainer
+ Output Complex Fields Container with labels matching the input Fields Container. No supports bound, but prepare_sampling_fft provides it.
Examples
--------
@@ -115,17 +110,17 @@ def __init__(
self.inputs.scale_right_amplitude.connect(scale_right_amplitude)
@staticmethod
- def _spec():
- description = """Computes the Fast Fourier Transform on each component of input Field
- or each field of input Fields Container (you can use
- transpose_fields_container to have relevant scoping).
- Fields are assumed with the same scoping, number of
- components and representing equally spaced data, ideally
- resampled to have a 2^n points (prepare_sampling_fft with
- time_freq_interpolation can help creating these fields).
- If Complex label is present, Complex to Complex FFT
- performed otherwise Real to Complex is performed (only
- half of the coefficient will be returned)."""
+ def _spec() -> Specification:
+ description = r"""Computes the Fast Fourier Transform on each component of input Field or
+each field of input Fields Container (you can use
+transpose_fields_container to have relevant scoping). Fields are assumed
+with the same scoping, number of components and representing equally
+spaced data, ideally resampled to have a 2^n points
+(prepare_sampling_fft with time_freq_interpolation can help creating
+these fields). If Complex label is present, Complex to Complex FFT
+performed otherwise Real to Complex is performed (only half of the
+coefficient will be returned).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -133,44 +128,37 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container.""",
+ document=r"""Field or Fields Container.""",
),
3: PinSpecification(
name="scale_forward_transform",
type_names=["double"],
optional=True,
- document="""Scale for forward transform, default is
- 2/field_num_elementary_data.""",
+ document=r"""Scale for Forward Transform, default is 2/field_num_elementary_data.""",
),
4: PinSpecification(
name="inplace",
type_names=["bool"],
optional=True,
- document="""True if inplace, default is false.""",
+ document=r"""True if inplace, default is false.""",
),
5: PinSpecification(
name="force_fft_points",
type_names=["int32"],
optional=True,
- document="""Explicitely define number of fft points to
- either rescope or perform zero
- padding.""",
+ document=r"""Explicitely define number of fft points to either rescope or perform zero padding.""",
),
6: PinSpecification(
name="cutoff_frequency",
type_names=["double"],
optional=True,
- document="""Restrict output frequency up to this cutoff
- frequency""",
+ document=r"""Restrict output frequency up to this cutoff frequency""",
),
7: PinSpecification(
name="scale_right_amplitude",
type_names=["bool"],
optional=True,
- document="""If set to true (default is false),
- 2/field_num_entities scaling will be
- applied, to have right amplitude
- values.""",
+ document=r"""If set to true (default is false), 2/field_num_entities scaling will be applied, to have right amplitude values.""",
),
},
map_output_pin_spec={
@@ -178,17 +166,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Output complex fields container with labels
- matching input fields container. no
- supports binded, but
- prepare_sampling_fft provides it.""",
+ document=r"""Output Complex Fields Container with labels matching input Fields Container. No supports binded, but prepare_sampling_fft provides it.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -197,29 +182,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="fft", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFft:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFft
+ inputs:
+ An instance of InputsFft.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFft:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFft
+ outputs:
+ An instance of OutputsFft.
"""
return super().outputs
@@ -262,14 +254,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._scale_right_amplitude)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container.
+ Field or Fields Container.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -282,15 +275,15 @@ def field(self):
return self._field
@property
- def scale_forward_transform(self):
- """Allows to connect scale_forward_transform input to the operator.
+ def scale_forward_transform(self) -> Input:
+ r"""Allows to connect scale_forward_transform input to the operator.
- Scale for forward transform, default is
- 2/field_num_elementary_data.
+ Scale for Forward Transform, default is 2/field_num_elementary_data.
- Parameters
- ----------
- my_scale_forward_transform : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -303,14 +296,15 @@ def scale_forward_transform(self):
return self._scale_forward_transform
@property
- def inplace(self):
- """Allows to connect inplace input to the operator.
+ def inplace(self) -> Input:
+ r"""Allows to connect inplace input to the operator.
True if inplace, default is false.
- Parameters
- ----------
- my_inplace : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,16 +317,15 @@ def inplace(self):
return self._inplace
@property
- def force_fft_points(self):
- """Allows to connect force_fft_points input to the operator.
+ def force_fft_points(self) -> Input:
+ r"""Allows to connect force_fft_points input to the operator.
- Explicitely define number of fft points to
- either rescope or perform zero
- padding.
+ Explicitly define the number of FFT points to either rescope or perform zero padding.
- Parameters
- ----------
- my_force_fft_points : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -345,15 +338,15 @@ def force_fft_points(self):
return self._force_fft_points
@property
- def cutoff_frequency(self):
- """Allows to connect cutoff_frequency input to the operator.
+ def cutoff_frequency(self) -> Input:
+ r"""Allows to connect cutoff_frequency input to the operator.
- Restrict output frequency up to this cutoff
- frequency
+ Restrict the output frequency up to this cutoff frequency.
- Parameters
- ----------
- my_cutoff_frequency : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -366,17 +359,15 @@ def cutoff_frequency(self):
return self._cutoff_frequency
@property
- def scale_right_amplitude(self):
- """Allows to connect scale_right_amplitude input to the operator.
+ def scale_right_amplitude(self) -> Input:
+ r"""Allows to connect scale_right_amplitude input to the operator.
- If set to true (default is false),
- 2/field_num_entities scaling will be
- applied, to have right amplitude
- values.
+ If set to true (default is false), 2/field_num_entities scaling will be applied to obtain the right amplitude values.
- Parameters
- ----------
- my_scale_right_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -407,18 +398,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ Output Complex Fields Container with labels matching the input Fields Container. No support is bound to it, but prepare_sampling_fft provides one.
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.fft()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
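To complement the doctest fragments above, here is a minimal end-to-end sketch of the fft operator documented in this file. The operator path and pin names come from the specification above; my_field and the numeric values are illustrative placeholders, and the field is assumed to hold equally spaced time-domain samples, as the description requires.

>>> from ansys.dpf import core as dpf
>>> # my_field is assumed to contain equally spaced time-domain samples
>>> op = dpf.operators.mapping.fft()
>>> op.inputs.field.connect(my_field)
>>> op.inputs.scale_right_amplitude.connect(True)   # apply 2/field_num_entities scaling
>>> op.inputs.cutoff_frequency.connect(1000.0)      # illustrative cutoff in Hz
>>> result_fields_container = op.outputs.fields_container()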
diff --git a/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py b/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py
index a520e879985..f6edf1e5db4 100644
--- a/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py
+++ b/src/ansys/dpf/core/operators/mapping/find_reduced_coordinates.py
@@ -4,42 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class find_reduced_coordinates(Operator):
- """Finds the elements corresponding to the given coordinates in input and
+ r"""Finds the elements corresponding to the given coordinates in input and
computes their reduced coordinates in those elements.
+
Parameters
----------
- coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer
- mesh : MeshedRegion or MeshesContainer, optional
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false). if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container.
- use_quadratic_elements : bool, optional
- If this pin is set to true, reduced
- coordinates are computed on the
- quadratic element if the element is
- quadratic (more precise but less
- performant). default is false.
+ coordinates: Field or FieldsContainer or MeshedRegion or MeshesContainer
+ mesh: MeshedRegion or MeshesContainer, optional
+ If the first field in input has no mesh in support, then the mesh in this pin is expected (default is false). If a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container.
+ use_quadratic_elements: bool, optional
+ If this pin is set to true, reduced coordinates are computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.
Returns
-------
- reduced_coordinates : FieldsContainer
- Coordinates in the reference elements
- element_ids : ScopingsContainer
- Ids of the elements where each set of reduced
- coordinates is found
+ reduced_coordinates: FieldsContainer
+ coordinates in the reference elements
+ element_ids: ScopingsContainer
+ Ids of the elements where each set of reduced coordinates is found
Examples
--------
@@ -87,9 +81,10 @@ def __init__(
self.inputs.use_quadratic_elements.connect(use_quadratic_elements)
@staticmethod
- def _spec():
- description = """Finds the elements corresponding to the given coordinates in input and
- computes their reduced coordinates in those elements."""
+ def _spec() -> Specification:
+ description = r"""Finds the elements corresponding to the given coordinates in input and
+computes their reduced coordinates in those elements.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -102,29 +97,19 @@ def _spec():
"meshes_container",
],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false). if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container.""",
+ document=r"""If the first field in input has no mesh in support, then the mesh in this pin is expected (default is false). If a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container.""",
),
200: PinSpecification(
name="use_quadratic_elements",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, reduced
- coordinates are computed on the
- quadratic element if the element is
- quadratic (more precise but less
- performant). default is false.""",
+ document=r"""If this pin is set to true, reduced coordinates are computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.""",
),
},
map_output_pin_spec={
@@ -132,21 +117,20 @@ def _spec():
name="reduced_coordinates",
type_names=["fields_container"],
optional=False,
- document="""Coordinates in the reference elements""",
+ document=r"""coordinates in the reference elements""",
),
1: PinSpecification(
name="element_ids",
type_names=["scopings_container"],
optional=False,
- document="""Ids of the elements where each set of reduced
- coordinates is found""",
+ document=r"""Ids of the elements where each set of reduced coordinates is found""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -155,29 +139,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="find_reduced_coordinates", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFindReducedCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFindReducedCoordinates
+ inputs:
+ An instance of InputsFindReducedCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFindReducedCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFindReducedCoordinates
+ outputs:
+ An instance of OutputsFindReducedCoordinates.
"""
return super().outputs
@@ -212,12 +203,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._use_quadratic_elements)
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Parameters
- ----------
- my_coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -230,20 +222,15 @@ def coordinates(self):
return self._coordinates
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false). if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container.
+ If the first field in input has no mesh in support, then the mesh in this pin is expected (default is false). If a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -256,18 +243,15 @@ def mesh(self):
return self._mesh
@property
- def use_quadratic_elements(self):
- """Allows to connect use_quadratic_elements input to the operator.
+ def use_quadratic_elements(self) -> Input:
+ r"""Allows to connect use_quadratic_elements input to the operator.
- If this pin is set to true, reduced
- coordinates are computed on the
- quadratic element if the element is
- quadratic (more precise but less
- performant). default is false.
+ If this pin is set to true, reduced coordinates are computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.
- Parameters
- ----------
- my_use_quadratic_elements : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -305,35 +289,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._element_ids)
@property
- def reduced_coordinates(self):
- """Allows to get reduced_coordinates output of the operator
+ def reduced_coordinates(self) -> Output:
+ r"""Allows to get reduced_coordinates output of the operator
+
+ coordinates in the reference elements
Returns
- ----------
- my_reduced_coordinates : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.find_reduced_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_reduced_coordinates = op.outputs.reduced_coordinates()
- """ # noqa: E501
+ """
return self._reduced_coordinates
@property
- def element_ids(self):
- """Allows to get element_ids output of the operator
+ def element_ids(self) -> Output:
+ r"""Allows to get element_ids output of the operator
+
+ Ids of the elements where each set of reduced coordinates is found
Returns
- ----------
- my_element_ids : ScopingsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.find_reduced_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_element_ids = op.outputs.element_ids()
- """ # noqa: E501
+ """
return self._element_ids
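A minimal usage sketch for find_reduced_coordinates, following the doctest convention used above; my_coordinates and my_mesh are assumed placeholders for an existing coordinates field and meshed region.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.find_reduced_coordinates()
>>> op.inputs.coordinates.connect(my_coordinates)
>>> op.inputs.mesh.connect(my_mesh)  # needed if the coordinates field has no mesh support
>>> reduced = op.outputs.reduced_coordinates()   # coordinates in the reference elements
>>> element_ids = op.outputs.element_ids()       # ids of the elements containing each point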
diff --git a/src/ansys/dpf/core/operators/mapping/on_coordinates.py b/src/ansys/dpf/core/operators/mapping/on_coordinates.py
index cbe1e04d1e8..295469b5394 100644
--- a/src/ansys/dpf/core/operators/mapping/on_coordinates.py
+++ b/src/ansys/dpf/core/operators/mapping/on_coordinates.py
@@ -4,49 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class on_coordinates(Operator):
- """Evaluates a result on specified coordinates (interpolates results
- inside elements with shape functions).
+ r"""Evaluates a result on specified coordinates (interpolates results inside
+ elements with shape functions).
+
Parameters
----------
- fields_container : FieldsContainer
- coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer
- create_support : bool, optional
- If this pin is set to true, then, a support
- associated to the fields consisting
- of points is created
- mapping_on_scoping : bool, optional
- If this pin is set to true, then the mapping
- between the coordinates and the
- fields is created only on the first
- field scoping
- mesh : MeshedRegion or MeshesContainer, optional
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container
- use_quadratic_elements : bool, optional
- If this pin is set to true, the element
- search for each coordinate is
- computed on the quadratic element if
- the element is quadratic (more
- precise but less performant). default
- is false.
+ fields_container: FieldsContainer
+ coordinates: Field or FieldsContainer or MeshedRegion or MeshesContainer
+ create_support: bool, optional
+ if this pin is set to true, a support associated with the fields consisting of points is created
+ mapping_on_scoping: bool, optional
+ if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping
+ mesh: MeshedRegion or MeshesContainer, optional
+ if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false); if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container
+ use_quadratic_elements: bool, optional
+ If this pin is set to true, the element search for each coordinate is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -111,9 +100,10 @@ def __init__(
self.inputs.use_quadratic_elements.connect(use_quadratic_elements)
@staticmethod
- def _spec():
- description = """Evaluates a result on specified coordinates (interpolates results
- inside elements with shape functions)."""
+ def _spec() -> Specification:
+ description = r"""Evaluates a result on specified coordinates (interpolates results inside
+elements with shape functions).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -121,7 +111,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="coordinates",
@@ -132,47 +122,31 @@ def _spec():
"meshes_container",
],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="create_support",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, then, a support
- associated to the fields consisting
- of points is created""",
+ document=r"""if this pin is set to true, then, a support associated to the fields consisting of points is created""",
),
3: PinSpecification(
name="mapping_on_scoping",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, then the mapping
- between the coordinates and the
- fields is created only on the first
- field scoping""",
+ document=r"""if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container""",
+ document=r"""if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container""",
),
200: PinSpecification(
name="use_quadratic_elements",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, the element
- search for each coordinate is
- computed on the quadratic element if
- the element is quadratic (more
- precise but less performant). default
- is false.""",
+ document=r"""If this pin is set to true, the element search for each coordinate is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.""",
),
},
map_output_pin_spec={
@@ -180,14 +154,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -196,29 +170,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapping", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsOnCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsOnCoordinates
+ inputs:
+ An instance of InputsOnCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsOnCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsOnCoordinates
+ outputs:
+ An instance of OutputsOnCoordinates.
"""
return super().outputs
@@ -263,12 +244,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._use_quadratic_elements)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -281,12 +263,13 @@ def fields_container(self):
return self._fields_container
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Parameters
- ----------
- my_coordinates : Field or FieldsContainer or MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,16 +282,15 @@ def coordinates(self):
return self._coordinates
@property
- def create_support(self):
- """Allows to connect create_support input to the operator.
+ def create_support(self) -> Input:
+ r"""Allows to connect create_support input to the operator.
- If this pin is set to true, then, a support
- associated to the fields consisting
- of points is created
+ if this pin is set to true, a support associated with the fields consisting of points is created
- Parameters
- ----------
- my_create_support : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -321,17 +303,15 @@ def create_support(self):
return self._create_support
@property
- def mapping_on_scoping(self):
- """Allows to connect mapping_on_scoping input to the operator.
+ def mapping_on_scoping(self) -> Input:
+ r"""Allows to connect mapping_on_scoping input to the operator.
- If this pin is set to true, then the mapping
- between the coordinates and the
- fields is created only on the first
- field scoping
+ if this pin is set to true, then the mapping between the coordinates and the fields is created only on the first field scoping
- Parameters
- ----------
- my_mapping_on_scoping : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -344,20 +324,15 @@ def mapping_on_scoping(self):
return self._mapping_on_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container
+ if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false); if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -370,19 +345,15 @@ def mesh(self):
return self._mesh
@property
- def use_quadratic_elements(self):
- """Allows to connect use_quadratic_elements input to the operator.
+ def use_quadratic_elements(self) -> Input:
+ r"""Allows to connect use_quadratic_elements input to the operator.
- If this pin is set to true, the element
- search for each coordinate is
- computed on the quadratic element if
- the element is quadratic (more
- precise but less performant). default
- is false.
+ If this pin is set to true, the element search for each coordinate is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false.
- Parameters
- ----------
- my_use_quadratic_elements : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -413,18 +384,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.on_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
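A minimal sketch of the on_coordinates operator documented in this file; my_fields_container and my_coordinates are assumed placeholders for existing DPF objects.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.on_coordinates()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.coordinates.connect(my_coordinates)
>>> op.inputs.create_support.connect(True)  # attach a point support to the mapped fields
>>> mapped = op.outputs.fields_container()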
diff --git a/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py b/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py
index 094c471f323..14f23fbab13 100644
--- a/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py
+++ b/src/ansys/dpf/core/operators/mapping/on_reduced_coordinates.py
@@ -4,47 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class on_reduced_coordinates(Operator):
- """Evaluates a result on specified reduced coordinates of given elements
+ r"""Evaluates a result on specified reduced coordinates of given elements
(interpolates results inside elements with shape functions).
+
Parameters
----------
- fields_container : FieldsContainer
- reduced_coordinates : Field or FieldsContainer
- Coordinates in the reference elements to find
- (found with the operator
- "find_reduced_coordinates")
- element_ids : ScopingsContainer
- Ids of the elements where each set of reduced
- coordinates is found (found with the
- operator "find_reduced_coordinates")
- mesh : MeshedRegion or MeshesContainer, optional
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container
- use_quadratic_elements : bool, optional
- If this pin is set to true, the interpolation
- is computed on the quadratic element
- if the element is quadratic (more
- precise but less performant). default
- is false. to use only when results
- have mid side nodes values.
+ fields_container: FieldsContainer
+ reduced_coordinates: Field or FieldsContainer
+ coordinates in the reference elements to find (found with the operator "find_reduced_coordinates")
+ element_ids: ScopingsContainer
+ Ids of the elements where each set of reduced coordinates is found (found with the operator "find_reduced_coordinates")
+ mesh: MeshedRegion or MeshesContainer, optional
+ if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false); if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container
+ use_quadratic_elements: bool, optional
+ If this pin is set to true, the interpolation is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. To be used only when results have mid-side node values.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -103,10 +93,10 @@ def __init__(
self.inputs.use_quadratic_elements.connect(use_quadratic_elements)
@staticmethod
- def _spec():
- description = """Evaluates a result on specified reduced coordinates of given elements
- (interpolates results inside elements with shape
- functions)."""
+ def _spec() -> Specification:
+ description = r"""Evaluates a result on specified reduced coordinates of given elements
+(interpolates results inside elements with shape functions).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -114,46 +104,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="reduced_coordinates",
type_names=["field", "fields_container"],
optional=False,
- document="""Coordinates in the reference elements to find
- (found with the operator
- "find_reduced_coordinates")""",
+ document=r"""coordinates in the reference elements to find (found with the operator "find_reduced_coordinates")""",
),
2: PinSpecification(
name="element_ids",
type_names=["scopings_container"],
optional=False,
- document="""Ids of the elements where each set of reduced
- coordinates is found (found with the
- operator "find_reduced_coordinates")""",
+ document=r"""Ids of the elements where each set of reduced coordinates is found (found with the operator "find_reduced_coordinates")""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container""",
+ document=r"""if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false), if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container""",
),
200: PinSpecification(
name="use_quadratic_elements",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, the interpolation
- is computed on the quadratic element
- if the element is quadratic (more
- precise but less performant). default
- is false. to use only when results
- have mid side nodes values.""",
+ document=r"""If this pin is set to true, the interpolation is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. To use only when results have mid side nodes values.""",
),
},
map_output_pin_spec={
@@ -161,14 +136,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -177,29 +152,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="interpolation_operator", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsOnReducedCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsOnReducedCoordinates
+ inputs:
+ An instance of InputsOnReducedCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsOnReducedCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsOnReducedCoordinates
+ outputs:
+ An instance of OutputsOnReducedCoordinates.
"""
return super().outputs
@@ -246,12 +228,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._use_quadratic_elements)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -264,16 +247,15 @@ def fields_container(self):
return self._fields_container
@property
- def reduced_coordinates(self):
- """Allows to connect reduced_coordinates input to the operator.
+ def reduced_coordinates(self) -> Input:
+ r"""Allows to connect reduced_coordinates input to the operator.
- Coordinates in the reference elements to find
- (found with the operator
- "find_reduced_coordinates")
+ coordinates in the reference elements to find (found with the operator "find_reduced_coordinates")
- Parameters
- ----------
- my_reduced_coordinates : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -286,16 +268,15 @@ def reduced_coordinates(self):
return self._reduced_coordinates
@property
- def element_ids(self):
- """Allows to connect element_ids input to the operator.
+ def element_ids(self) -> Input:
+ r"""Allows to connect element_ids input to the operator.
- Ids of the elements where each set of reduced
- coordinates is found (found with the
- operator "find_reduced_coordinates")
+ Ids of the elements where each set of reduced coordinates is found (found with the operator "find_reduced_coordinates")
- Parameters
- ----------
- my_element_ids : ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -308,20 +289,15 @@ def element_ids(self):
return self._element_ids
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- If the first field in input has no mesh in
- support, then the mesh in this pin is
- expected (default is false), if a
- meshes container with several meshes
- is set, it should be on the same
- label spaces as the coordinates
- fields container
+ if the first field in input has no mesh in support, then the mesh in this pin is expected (default is false); if a meshes container with several meshes is set, it should be on the same label spaces as the coordinates fields container
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -334,19 +310,15 @@ def mesh(self):
return self._mesh
@property
- def use_quadratic_elements(self):
- """Allows to connect use_quadratic_elements input to the operator.
+ def use_quadratic_elements(self) -> Input:
+ r"""Allows to connect use_quadratic_elements input to the operator.
- If this pin is set to true, the interpolation
- is computed on the quadratic element
- if the element is quadratic (more
- precise but less performant). default
- is false. to use only when results
- have mid side nodes values.
+ If this pin is set to true, the interpolation is computed on the quadratic element if the element is quadratic (more precise but less performant). Default is false. To be used only when results have mid-side node values.
- Parameters
- ----------
- my_use_quadratic_elements : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,18 +351,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.on_reduced_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
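A minimal sketch showing how on_reduced_coordinates is typically chained with find_reduced_coordinates, as the pin documentation suggests; my_fields_container, my_coordinates, and my_mesh are assumed placeholders.

>>> from ansys.dpf import core as dpf
>>> finder = dpf.operators.mapping.find_reduced_coordinates()
>>> finder.inputs.coordinates.connect(my_coordinates)
>>> finder.inputs.mesh.connect(my_mesh)
>>> op = dpf.operators.mapping.on_reduced_coordinates()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.reduced_coordinates.connect(finder.outputs.reduced_coordinates)
>>> op.inputs.element_ids.connect(finder.outputs.element_ids)
>>> interpolated = op.outputs.fields_container()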
diff --git a/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py b/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py
index 7d938313c68..a0fad39304f 100644
--- a/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py
+++ b/src/ansys/dpf/core/operators/mapping/prep_sampling_fft.py
@@ -4,38 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class prep_sampling_fft(Operator):
- """Prepare time sampling optimum for FFT computation and expected
+ r"""Prepare time sampling optimum for FFT computation and expected
frequencies in output.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- Initial time domain timefreqsupport.
- cutoff_frequency : float, optional
- Cutoff frequency. in this case, number of
- points is calculated computing
- (time_range * cutoff_freq * 2) and
- taking the next power of 2 (optimum
- for fft calculation).
- number_sampling_point : int, optional
- For number of sampling point (calculation
- with cutoff_frequency is ignored).
+ time_freq_support: TimeFreqSupport
+ Initial time domain TimeFreqSupport.
+ cutoff_frequency: float, optional
+ Cutoff frequency. In this case, the number of points is calculated by computing (time_range * cutoff_freq * 2) and taking the next power of 2 (optimal for FFT calculation).
+ number_sampling_point: int, optional
+ Number of sampling points (if set, the calculation with cutoff_frequency is ignored).
Returns
-------
- time_tfs_sampled : TimeFreqSupport
- Optimum sampled time domain timefreqsupport.
- freq_tfs_fft : TimeFreqSupport
- Frequency domain timefreqsupport expected in
- output of fft.
+ time_tfs_sampled: TimeFreqSupport
+ Optimum sampled time domain TimeFreqSupport.
+ freq_tfs_fft: TimeFreqSupport
+ Frequency domain TimeFreqSupport expected in output of FFT.
Examples
--------
@@ -83,9 +82,10 @@ def __init__(
self.inputs.number_sampling_point.connect(number_sampling_point)
@staticmethod
- def _spec():
- description = """Prepare time sampling optimum for FFT computation and expected
- frequencies in output."""
+ def _spec() -> Specification:
+ description = r"""Prepare time sampling optimum for FFT computation and expected
+frequencies in output.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -93,24 +93,19 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""Initial time domain timefreqsupport.""",
+ document=r"""Initial time domain TimeFreqSupport.""",
),
1: PinSpecification(
name="cutoff_frequency",
type_names=["double"],
optional=True,
- document="""Cutoff frequency. in this case, number of
- points is calculated computing
- (time_range * cutoff_freq * 2) and
- taking the next power of 2 (optimum
- for fft calculation).""",
+ document=r"""Cutoff Frequency. In this case, number of points is calculated computing (time_range * cutoff_freq * 2) and taking the next power of 2 (optimum for fft calculation).""",
),
2: PinSpecification(
name="number_sampling_point",
type_names=["int32"],
optional=True,
- document="""For number of sampling point (calculation
- with cutoff_frequency is ignored).""",
+ document=r"""For number of sampling point (calculation with cutoff_frequency is ignored).""",
),
},
map_output_pin_spec={
@@ -118,21 +113,20 @@ def _spec():
name="time_tfs_sampled",
type_names=["time_freq_support"],
optional=False,
- document="""Optimum sampled time domain timefreqsupport.""",
+ document=r"""Optimum sampled time domain TimeFreqSupport.""",
),
1: PinSpecification(
name="freq_tfs_fft",
type_names=["time_freq_support"],
optional=False,
- document="""Frequency domain timefreqsupport expected in
- output of fft.""",
+ document=r"""Frequency domain TimeFreqSupport expected in output of FFT.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -141,29 +135,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="prepare_sampling_fft", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPrepSamplingFft:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPrepSamplingFft
+ inputs:
+ An instance of InputsPrepSamplingFft.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPrepSamplingFft:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPrepSamplingFft
+ outputs:
+ An instance of OutputsPrepSamplingFft.
"""
return super().outputs
@@ -200,14 +201,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._number_sampling_point)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Initial time domain timefreqsupport.
+ Initial time domain TimeFreqSupport.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -220,18 +222,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def cutoff_frequency(self):
- """Allows to connect cutoff_frequency input to the operator.
+ def cutoff_frequency(self) -> Input:
+ r"""Allows to connect cutoff_frequency input to the operator.
- Cutoff frequency. in this case, number of
- points is calculated computing
- (time_range * cutoff_freq * 2) and
- taking the next power of 2 (optimum
- for fft calculation).
+ Cutoff frequency. In this case, the number of points is calculated by computing (time_range * cutoff_freq * 2) and taking the next power of 2 (optimal for FFT calculation).
- Parameters
- ----------
- my_cutoff_frequency : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -244,15 +243,15 @@ def cutoff_frequency(self):
return self._cutoff_frequency
@property
- def number_sampling_point(self):
- """Allows to connect number_sampling_point input to the operator.
+ def number_sampling_point(self) -> Input:
+ r"""Allows to connect number_sampling_point input to the operator.
- For number of sampling point (calculation
- with cutoff_frequency is ignored).
+ Number of sampling points (if set, the calculation with cutoff_frequency is ignored).
- Parameters
- ----------
- my_number_sampling_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -286,35 +285,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._freq_tfs_fft)
@property
- def time_tfs_sampled(self):
- """Allows to get time_tfs_sampled output of the operator
+ def time_tfs_sampled(self) -> Output:
+ r"""Allows to get time_tfs_sampled output of the operator
+
+ Optimum sampled time domain TimeFreqSupport.
Returns
- ----------
- my_time_tfs_sampled : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.prep_sampling_fft()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_tfs_sampled = op.outputs.time_tfs_sampled()
- """ # noqa: E501
+ """
return self._time_tfs_sampled
@property
- def freq_tfs_fft(self):
- """Allows to get freq_tfs_fft output of the operator
+ def freq_tfs_fft(self) -> Output:
+ r"""Allows to get freq_tfs_fft output of the operator
+
+ Frequency domain TimeFreqSupport expected in output of FFT.
Returns
- ----------
- my_freq_tfs_fft : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.prep_sampling_fft()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_freq_tfs_fft = op.outputs.freq_tfs_fft()
- """ # noqa: E501
+ """
return self._freq_tfs_fft
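A minimal sketch of prep_sampling_fft with a worked instance of the point-count rule described above; my_time_freq_support is an assumed placeholder and the cutoff value is illustrative.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.prep_sampling_fft()
>>> op.inputs.time_freq_support.connect(my_time_freq_support)
>>> # e.g. a 1 s time range with a 1000 Hz cutoff: 1 * 1000 * 2 = 2000 -> next power of 2 is 2048 points
>>> op.inputs.cutoff_frequency.connect(1000.0)
>>> time_tfs_sampled = op.outputs.time_tfs_sampled()  # resampled time-domain support
>>> freq_tfs_fft = op.outputs.freq_tfs_fft()          # frequency-domain support expected from fft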
diff --git a/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py b/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py
index c31a4baa77b..3761ece7ce3 100644
--- a/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py
+++ b/src/ansys/dpf/core/operators/mapping/prepare_mapping_workflow.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class prepare_mapping_workflow(Operator):
- """Generates a workflow that can map results from a support to another
- one.
+ r"""Generates a workflow that can map results from a support to another one.
+
Parameters
----------
- input_support : Field or MeshedRegion
- output_support : Field or MeshedRegion
- filter_radius : float
- Radius size for the rbf filter
- influence_box : float, optional
+ input_support: Field or MeshedRegion
+ output_support: Field or MeshedRegion
+ filter_radius: float
+ Radius size for the RBF filter
+ influence_box: float, optional
Returns
-------
- mapping_workflow : Workflow
+ mapping_workflow: Workflow
Examples
--------
@@ -78,9 +82,9 @@ def __init__(
self.inputs.influence_box.connect(influence_box)
@staticmethod
- def _spec():
- description = """Generates a workflow that can map results from a support to another
- one."""
+ def _spec() -> Specification:
+ description = r"""Generates a workflow that can map results from a support to another one.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,25 +92,25 @@ def _spec():
name="input_support",
type_names=["field", "abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="output_support",
type_names=["field", "abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="filter_radius",
type_names=["double"],
optional=False,
- document="""Radius size for the rbf filter""",
+ document=r"""Radius size for the RBF filter""",
),
3: PinSpecification(
name="influence_box",
type_names=["double"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -114,14 +118,14 @@ def _spec():
name="mapping_workflow",
type_names=["workflow"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -130,29 +134,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="prepare_mapping_workflow", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPrepareMappingWorkflow:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPrepareMappingWorkflow
+ inputs:
+ An instance of InputsPrepareMappingWorkflow.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPrepareMappingWorkflow:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPrepareMappingWorkflow
+ outputs:
+ An instance of OutputsPrepareMappingWorkflow.
"""
return super().outputs
@@ -195,12 +206,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._influence_box)
@property
- def input_support(self):
- """Allows to connect input_support input to the operator.
+ def input_support(self) -> Input:
+ r"""Allows to connect input_support input to the operator.
- Parameters
- ----------
- my_input_support : Field or MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -213,12 +225,13 @@ def input_support(self):
return self._input_support
@property
- def output_support(self):
- """Allows to connect output_support input to the operator.
+ def output_support(self) -> Input:
+ r"""Allows to connect output_support input to the operator.
- Parameters
- ----------
- my_output_support : Field or MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,14 +244,15 @@ def output_support(self):
return self._output_support
@property
- def filter_radius(self):
- """Allows to connect filter_radius input to the operator.
+ def filter_radius(self) -> Input:
+ r"""Allows to connect filter_radius input to the operator.
- Radius size for the rbf filter
+ Radius size for the RBF filter
- Parameters
- ----------
- my_filter_radius : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,12 +265,13 @@ def filter_radius(self):
return self._filter_radius
@property
- def influence_box(self):
- """Allows to connect influence_box input to the operator.
+ def influence_box(self) -> Input:
+ r"""Allows to connect influence_box input to the operator.
- Parameters
- ----------
- my_influence_box : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -289,18 +304,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mapping_workflow)
@property
- def mapping_workflow(self):
- """Allows to get mapping_workflow output of the operator
+ def mapping_workflow(self) -> Output:
+ r"""Allows to get mapping_workflow output of the operator
Returns
- ----------
- my_mapping_workflow : Workflow
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.prepare_mapping_workflow()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mapping_workflow = op.outputs.mapping_workflow()
- """ # noqa: E501
+ """
return self._mapping_workflow
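A minimal sketch of prepare_mapping_workflow; my_source_mesh, my_target_mesh, and the filter radius value are assumed placeholders.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.prepare_mapping_workflow()
>>> op.inputs.input_support.connect(my_source_mesh)
>>> op.inputs.output_support.connect(my_target_mesh)
>>> op.inputs.filter_radius.connect(0.01)  # RBF filter radius, illustrative value
>>> mapping_workflow = op.outputs.mapping_workflow()  # Workflow that maps results between the two supports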
diff --git a/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py b/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py
index 10011a54746..740aa4f9527 100644
--- a/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py
+++ b/src/ansys/dpf/core/operators/mapping/scoping_on_coordinates.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scoping_on_coordinates(Operator):
- """Finds the Elemental scoping of a set of coordinates.
+ r"""Finds the Elemental scoping of a set of coordinates.
+
Parameters
----------
- coordinates : Field
- mesh : MeshedRegion
+ coordinates: Field
+ mesh: MeshedRegion
Returns
-------
- scoping : Scoping
+ scoping: Scoping
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, coordinates=None, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Finds the Elemental scoping of a set of coordinates."""
+ def _spec() -> Specification:
+ description = r"""Finds the Elemental scoping of a set of coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="coordinates",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="scoping::on_coordinates", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScopingOnCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScopingOnCoordinates
+ inputs:
+ An instance of InputsScopingOnCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScopingOnCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScopingOnCoordinates
+ outputs:
+ An instance of OutputsScopingOnCoordinates.
"""
return super().outputs
@@ -146,12 +159,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Parameters
- ----------
- my_coordinates : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,12 +178,13 @@ def coordinates(self):
return self._coordinates
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,18 +215,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping)
@property
- def scoping(self):
- """Allows to get scoping output of the operator
+ def scoping(self) -> Output:
+ r"""Allows to get scoping output of the operator
Returns
- ----------
- my_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.scoping_on_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping = op.outputs.scoping()
- """ # noqa: E501
+ """
return self._scoping
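A minimal sketch of the keyword constructor documented above, assuming a running DPF server; `my_coordinates` and `my_mesh` are illustrative placeholders, not objects from this diff:

>>> from ansys.dpf import core as dpf
>>> my_coordinates = dpf.Field()      # 3D coordinates to locate in the mesh
>>> my_mesh = dpf.MeshedRegion()      # mesh searched for the containing elements
>>> op = dpf.operators.mapping.scoping_on_coordinates(
...     coordinates=my_coordinates,
...     mesh=my_mesh,
... )
>>> elemental_scoping = op.outputs.scoping()   # evaluates once real data is connected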
diff --git a/src/ansys/dpf/core/operators/mapping/solid_to_skin.py b/src/ansys/dpf/core/operators/mapping/solid_to_skin.py
index 405a3f549a4..42c0a2f0ed7 100644
--- a/src/ansys/dpf/core/operators/mapping/solid_to_skin.py
+++ b/src/ansys/dpf/core/operators/mapping/solid_to_skin.py
@@ -4,36 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class solid_to_skin(Operator):
- """Maps a field defined on solid elements to a field defined on skin
+ r"""Maps a field defined on solid elements to a field defined on skin
elements. Three cases are possible, based on the solid field data
- location; (i) Elemental: The values associated with the solid
- elements are copied according to those underlying the skin, (ii)
- Nodal: The solid field is rescoped with respect to the nodes of
- the skin mesh, (iii) ElementalNodal: The values are copied from
- the solid mesh to the skin mesh for each element face and the
- nodes associated with it.
+ location; (i) Elemental: The values associated with the solid elements
+ are copied according to those underlying the skin, (ii) Nodal: The solid
+ field is rescoped with respect to the nodes of the skin mesh, (iii)
+ ElementalNodal: The values are copied from the solid mesh to the skin
+ mesh for each element face and the nodes associated with it.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- mesh : MeshedRegion
- Skin mesh region expected
- solid_mesh : MeshedRegion, optional
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ mesh: MeshedRegion
+ skin mesh region expected
+ solid_mesh: MeshedRegion, optional
Solid mesh support (optional).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -75,16 +78,15 @@ def __init__(
self.inputs.solid_mesh.connect(solid_mesh)
@staticmethod
- def _spec():
- description = """Maps a field defined on solid elements to a field defined on skin
- elements. Three cases are possible, based on the solid
- field data location; (i) Elemental: The values associated
- with the solid elements are copied according to those
- underlying the skin, (ii) Nodal: The solid field is
- rescoped with respect to the nodes of the skin mesh, (iii)
- ElementalNodal: The values are copied from the solid mesh
- to the skin mesh for each element face and the nodes
- associated with it."""
+ def _spec() -> Specification:
+ description = r"""Maps a field defined on solid elements to a field defined on skin
+elements. Three cases are possible, based on the solid field data
+location; (i) Elemental: The values associated with the solid elements
+are copied according to those underlying the skin, (ii) Nodal: The solid
+field is rescoped with respect to the nodes of the skin mesh, (iii)
+ElementalNodal: The values are copied from the solid mesh to the skin
+mesh for each element face and the nodes associated with it.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -92,20 +94,19 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Skin mesh region expected""",
+ document=r"""skin mesh region expected""",
),
2: PinSpecification(
name="solid_mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Solid mesh support (optional).""",
+ document=r"""Solid mesh support (optional).""",
),
},
map_output_pin_spec={
@@ -113,14 +114,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -129,29 +130,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="solid_to_skin", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSolidToSkin:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSolidToSkin
+ inputs:
+ An instance of InputsSolidToSkin.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSolidToSkin:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSolidToSkin
+ outputs:
+ An instance of OutputsSolidToSkin.
"""
return super().outputs
@@ -182,15 +190,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._solid_mesh)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,14 +211,15 @@ def field(self):
return self._field
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Skin mesh region expected
+ skin mesh region expected
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,14 +232,15 @@ def mesh(self):
return self._mesh
@property
- def solid_mesh(self):
- """Allows to connect solid_mesh input to the operator.
+ def solid_mesh(self) -> Input:
+ r"""Allows to connect solid_mesh input to the operator.
Solid mesh support (optional).
- Parameters
- ----------
- my_solid_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,18 +271,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.solid_to_skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
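A short sketch of mapping solid data onto a skin mesh through the pins above, assuming a running DPF server; the solid field and skin mesh names are placeholders:

>>> from ansys.dpf import core as dpf
>>> solid_field = dpf.Field()        # Elemental, Nodal, or ElementalNodal solid data
>>> skin_mesh = dpf.MeshedRegion()   # skin mesh region expected on pin 1
>>> op = dpf.operators.mapping.solid_to_skin(
...     field=solid_field,
...     mesh=skin_mesh,
... )
>>> skin_field = op.outputs.field()  # solid data carried over to the skin mesh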
diff --git a/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py b/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py
index e4824731e7e..781663a9c6b 100644
--- a/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py
+++ b/src/ansys/dpf/core/operators/mapping/solid_to_skin_fc.py
@@ -4,36 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class solid_to_skin_fc(Operator):
- """Maps a field defined on solid elements to a field defined on skin
+ r"""Maps a field defined on solid elements to a field defined on skin
elements. Three cases are possible, based on the solid field data
- location; (i) Elemental: The values associated with the solid
- elements are copied according to those underlying the skin, (ii)
- Nodal: The solid field is rescoped with respect to the nodes of
- the skin mesh, (iii) ElementalNodal: The values are copied from
- the solid mesh to the skin mesh for each element face and the
- nodes associated with it.
+ location; (i) Elemental: The values associated with the solid elements
+ are copied according to those underlying the skin, (ii) Nodal: The solid
+ field is rescoped with respect to the nodes of the skin mesh, (iii)
+ ElementalNodal: The values are copied from the solid mesh to the skin
+ mesh for each element face and the nodes associated with it.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- mesh : MeshedRegion
- Skin mesh region expected
- solid_mesh : MeshedRegion, optional
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ mesh: MeshedRegion
+ skin mesh region expected
+ solid_mesh: MeshedRegion, optional
Solid mesh support (optional).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -80,16 +83,15 @@ def __init__(
self.inputs.solid_mesh.connect(solid_mesh)
@staticmethod
- def _spec():
- description = """Maps a field defined on solid elements to a field defined on skin
- elements. Three cases are possible, based on the solid
- field data location; (i) Elemental: The values associated
- with the solid elements are copied according to those
- underlying the skin, (ii) Nodal: The solid field is
- rescoped with respect to the nodes of the skin mesh, (iii)
- ElementalNodal: The values are copied from the solid mesh
- to the skin mesh for each element face and the nodes
- associated with it."""
+ def _spec() -> Specification:
+ description = r"""Maps a field defined on solid elements to a field defined on skin
+elements. Three cases are possible, based on the solid field data
+location; (i) Elemental: The values associated with the solid elements
+are copied according to those underlying the skin, (ii) Nodal: The solid
+field is rescoped with respect to the nodes of the skin mesh, (iii)
+ElementalNodal: The values are copied from the solid mesh to the skin
+mesh for each element face and the nodes associated with it.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -97,20 +99,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Skin mesh region expected""",
+ document=r"""skin mesh region expected""",
),
2: PinSpecification(
name="solid_mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Solid mesh support (optional).""",
+ document=r"""Solid mesh support (optional).""",
),
},
map_output_pin_spec={
@@ -118,14 +119,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -134,29 +135,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="solid_to_skin_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSolidToSkinFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSolidToSkinFc
+ inputs:
+ An instance of InputsSolidToSkinFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSolidToSkinFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSolidToSkinFc
+ outputs:
+ An instance of OutputsSolidToSkinFc.
"""
return super().outputs
@@ -187,15 +195,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._solid_mesh)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -208,14 +216,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Skin mesh region expected
+ skin mesh region expected
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,14 +237,15 @@ def mesh(self):
return self._mesh
@property
- def solid_mesh(self):
- """Allows to connect solid_mesh input to the operator.
+ def solid_mesh(self) -> Input:
+ r"""Allows to connect solid_mesh input to the operator.
Solid mesh support (optional).
- Parameters
- ----------
- my_solid_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -266,18 +276,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mapping.solid_to_skin_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/absolute_value_by_component.py b/src/ansys/dpf/core/operators/math/absolute_value_by_component.py
index a54d685ba56..42e3ecbff26 100644
--- a/src/ansys/dpf/core/operators/math/absolute_value_by_component.py
+++ b/src/ansys/dpf/core/operators/math/absolute_value_by_component.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class absolute_value_by_component(Operator):
- """Compute the absolute value of each data value of the input field, no
+ r"""Compute the absolute value of each data value of the input field, no
norm performed.
+
Parameters
----------
- field : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -55,9 +59,10 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Compute the absolute value of each data value of the input field, no
- norm performed."""
+ def _spec() -> Specification:
+ description = r"""Compute the absolute value of each data value of the input field, no
+norm performed.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,8 +75,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -79,14 +83,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,31 +99,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="absolute_value_by_component", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsAbsoluteValueByComponent:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAbsoluteValueByComponent
+ inputs:
+ An instance of InputsAbsoluteValueByComponent.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAbsoluteValueByComponent:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAbsoluteValueByComponent
+ outputs:
+ An instance of OutputsAbsoluteValueByComponent.
"""
return super().outputs
@@ -142,15 +153,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -181,18 +192,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.absolute_value_by_component()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
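A minimal sketch of the component-wise absolute value, assuming a running DPF server and a placeholder input field:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()            # Field, FieldsContainer, or float is accepted on pin 0
>>> op = dpf.operators.math.absolute_value_by_component()
>>> op.inputs.field.connect(my_field)
>>> abs_field = op.outputs.field()    # absolute value taken per component, no norm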
diff --git a/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py b/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py
index 42546da7ea4..8c7ec72ffa6 100644
--- a/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py
+++ b/src/ansys/dpf/core/operators/math/absolute_value_by_component_fc.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class absolute_value_by_component_fc(Operator):
- """Compute the absolute value of each data value of the input field, no
+ r"""Compute the absolute value of each data value of the input field, no
norm performed.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -55,9 +59,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Compute the absolute value of each data value of the input field, no
- norm performed."""
+ def _spec() -> Specification:
+ description = r"""Compute the absolute value of each data value of the input field, no
+norm performed.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,8 +70,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -74,14 +78,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -90,31 +94,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="absolute_value_by_component_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsAbsoluteValueByComponentFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAbsoluteValueByComponentFc
+ inputs:
+ An instance of InputsAbsoluteValueByComponentFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAbsoluteValueByComponentFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAbsoluteValueByComponentFc
+ outputs:
+ An instance of OutputsAbsoluteValueByComponentFc.
"""
return super().outputs
@@ -139,15 +150,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -180,18 +191,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.absolute_value_by_component_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/accumulate.py b/src/ansys/dpf/core/operators/math/accumulate.py
index ca81a6b2ac9..f1d9182b203 100644
--- a/src/ansys/dpf/core/operators/math/accumulate.py
+++ b/src/ansys/dpf/core/operators/math/accumulate.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulate(Operator):
- """Sums all the elementary data of a field to produce one elementary data
+ r"""Sums all the elementary data of a field to produce one elementary data
point.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- ponderation : Field, optional
- Field containing weights, one weight per
- entity
- time_scoping : Scoping, optional
- Time_scoping
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ weights: Field, optional
+ Field containing weights, one weight per entity
+ time_scoping: Scoping, optional
+ time_scoping
Returns
-------
- field : Field
- Field containing the (weighted) sum for each
- component in an elementary data
+ field: Field
+ Field containing the (weighted) sum for each component in an elementary data
Examples
--------
@@ -42,15 +44,15 @@ class accumulate(Operator):
>>> # Make input connections
>>> my_fieldA = dpf.Field()
>>> op.inputs.fieldA.connect(my_fieldA)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_time_scoping = dpf.Scoping()
>>> op.inputs.time_scoping.connect(my_time_scoping)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.accumulate(
... fieldA=my_fieldA,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... time_scoping=my_time_scoping,
... )
@@ -59,22 +61,31 @@ class accumulate(Operator):
"""
def __init__(
- self, fieldA=None, ponderation=None, time_scoping=None, config=None, server=None
+ self,
+ fieldA=None,
+ weights=None,
+ time_scoping=None,
+ config=None,
+ server=None,
+ ponderation=None,
):
super().__init__(name="accumulate", config=config, server=server)
self._inputs = InputsAccumulate(self)
self._outputs = OutputsAccumulate(self)
if fieldA is not None:
self.inputs.fieldA.connect(fieldA)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
if time_scoping is not None:
self.inputs.time_scoping.connect(time_scoping)
@staticmethod
- def _spec():
- description = """Sums all the elementary data of a field to produce one elementary data
- point."""
+ def _spec() -> Specification:
+ description = r"""Sums all the elementary data of a field to produce one elementary data
+point.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -82,21 +93,20 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["field"],
optional=True,
- document="""Field containing weights, one weight per
- entity""",
+ document=r"""Field containing weights, one weight per entity""",
+ aliases=["ponderation"],
),
2: PinSpecification(
name="time_scoping",
type_names=["scoping"],
optional=True,
- document="""Time_scoping""",
+ document=r"""time_scoping""",
),
},
map_output_pin_spec={
@@ -104,15 +114,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field containing the (weighted) sum for each
- component in an elementary data""",
+ document=r"""Field containing the (weighted) sum for each component in an elementary data""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -121,29 +130,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="accumulate", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulate:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulate
+ inputs:
+ An instance of InputsAccumulate.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulate:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulate
+ outputs:
+ An instance of OutputsAccumulate.
"""
return super().outputs
@@ -158,8 +174,8 @@ class InputsAccumulate(_Inputs):
>>> op = dpf.operators.math.accumulate()
>>> my_fieldA = dpf.Field()
>>> op.inputs.fieldA.connect(my_fieldA)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_time_scoping = dpf.Scoping()
>>> op.inputs.time_scoping.connect(my_time_scoping)
"""
@@ -168,21 +184,21 @@ def __init__(self, op: Operator):
super().__init__(accumulate._spec().inputs, op)
self._fieldA = Input(accumulate._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fieldA)
- self._ponderation = Input(accumulate._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(accumulate._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
self._time_scoping = Input(accumulate._spec().input_pin(2), 2, op, -1)
self._inputs.append(self._time_scoping)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -195,35 +211,36 @@ def fieldA(self):
return self._fieldA
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Field containing weights, one weight per
- entity
+ Field containing weights, one weight per entity
- Parameters
- ----------
- my_ponderation : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Time_scoping
+ time_scoping
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -235,6 +252,18 @@ def time_scoping(self):
"""
return self._time_scoping
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator accumulate: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
+
class OutputsAccumulate(_Outputs):
"""Intermediate class used to get outputs from
@@ -254,18 +283,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ Field containing the (weighted) sum for each component in an elementary data
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
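A sketch of the renamed weights pin and the deprecation fallback introduced above, assuming a running DPF server; `my_weights` is a placeholder weighting field:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate()
>>> my_weights = dpf.Field()                     # one weight per entity
>>> op.inputs.weights.connect(my_weights)        # new pin name
>>> op.inputs.ponderation.connect(my_weights)    # old name still resolves, with a DeprecationWarning
>>> op_legacy = dpf.operators.math.accumulate(ponderation=my_weights)  # keyword alias routed to weights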
diff --git a/src/ansys/dpf/core/operators/math/accumulate_fc.py b/src/ansys/dpf/core/operators/math/accumulate_fc.py
index eb3aedcb5a2..4dec7de3ac8 100644
--- a/src/ansys/dpf/core/operators/math/accumulate_fc.py
+++ b/src/ansys/dpf/core/operators/math/accumulate_fc.py
@@ -4,33 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulate_fc(Operator):
- """Sums all the elementary data of a field to produce one elementary data
+ r"""Sums all the elementary data of a field to produce one elementary data
point.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- ponderation : Field, optional
- Field containing weights, one weight per
- entity
- time_scoping : Scoping, optional
- Time_scoping
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ weights: Field, optional
+ Field containing weights, one weight per entity
+ time_scoping: Scoping, optional
+ time_scoping
Returns
-------
- fields_container : FieldsContainer
- Field containing the (weighted) sum for each
- component in an elementary data
+ fields_container: FieldsContainer
+ Field containing the (weighted) sum for each component in an elementary data
Examples
--------
@@ -42,15 +44,15 @@ class accumulate_fc(Operator):
>>> # Make input connections
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_time_scoping = dpf.Scoping()
>>> op.inputs.time_scoping.connect(my_time_scoping)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.accumulate_fc(
... fields_container=my_fields_container,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... time_scoping=my_time_scoping,
... )
@@ -61,25 +63,29 @@ class accumulate_fc(Operator):
def __init__(
self,
fields_container=None,
- ponderation=None,
+ weights=None,
time_scoping=None,
config=None,
server=None,
+ ponderation=None,
):
super().__init__(name="accumulate_fc", config=config, server=server)
self._inputs = InputsAccumulateFc(self)
self._outputs = OutputsAccumulateFc(self)
if fields_container is not None:
self.inputs.fields_container.connect(fields_container)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
if time_scoping is not None:
self.inputs.time_scoping.connect(time_scoping)
@staticmethod
- def _spec():
- description = """Sums all the elementary data of a field to produce one elementary data
- point."""
+ def _spec() -> Specification:
+ description = r"""Sums all the elementary data of a field to produce one elementary data
+point.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -87,21 +93,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["field"],
optional=True,
- document="""Field containing weights, one weight per
- entity""",
+ document=r"""Field containing weights, one weight per entity""",
+ aliases=["ponderation"],
),
2: PinSpecification(
name="time_scoping",
type_names=["scoping"],
optional=True,
- document="""Time_scoping""",
+ document=r"""time_scoping""",
),
},
map_output_pin_spec={
@@ -109,15 +114,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field containing the (weighted) sum for each
- component in an elementary data""",
+ document=r"""Field containing the (weighted) sum for each component in an elementary data""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,29 +130,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="accumulate_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulateFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulateFc
+ inputs:
+ An instance of InputsAccumulateFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulateFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulateFc
+ outputs:
+ An instance of OutputsAccumulateFc.
"""
return super().outputs
@@ -163,8 +174,8 @@ class InputsAccumulateFc(_Inputs):
>>> op = dpf.operators.math.accumulate_fc()
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_time_scoping = dpf.Scoping()
>>> op.inputs.time_scoping.connect(my_time_scoping)
"""
@@ -173,21 +184,21 @@ def __init__(self, op: Operator):
super().__init__(accumulate_fc._spec().inputs, op)
self._fields_container = Input(accumulate_fc._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fields_container)
- self._ponderation = Input(accumulate_fc._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(accumulate_fc._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
self._time_scoping = Input(accumulate_fc._spec().input_pin(2), 2, op, -1)
self._inputs.append(self._time_scoping)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,35 +211,36 @@ def fields_container(self):
return self._fields_container
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Field containing weights, one weight per
- entity
+ Field containing weights, one weight per entity
- Parameters
- ----------
- my_ponderation : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_fc()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Time_scoping
+ time_scoping
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -240,6 +252,18 @@ def time_scoping(self):
"""
return self._time_scoping
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator accumulate_fc: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
+
class OutputsAccumulateFc(_Outputs):
"""Intermediate class used to get outputs from
@@ -259,18 +283,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ Field containing the (weighted) sum for each component in an elementary data
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py
index 937b24aa6ed..9aa0a6c2477 100644
--- a/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py
+++ b/src/ansys/dpf/core/operators/math/accumulate_level_over_label_fc.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulate_level_over_label_fc(Operator):
- """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container and apply
+ r"""Compute the component-wise sum over all the fields that have the same ID
+ as the label set as input in the fields container and apply
10.0xlog10(data/10xx-12) on the result. This computation can be
- incremental. If the input fields container is connected and the
- operator is run multiple times, the output field will be on all
- the connected inputs.
+ incremental. If the input fields container is connected and the operator
+ is run multiple times, the output field will be on all the connected
+ inputs.
+
Parameters
----------
- fields_container : FieldsContainer
- label : str, optional
- Label of the fields container where it should
- operate.
+ fields_container: FieldsContainer
+ label: str, optional
+ Label of the fields container where it should operate.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -65,14 +69,14 @@ def __init__(self, fields_container=None, label=None, config=None, server=None):
self.inputs.label.connect(label)
@staticmethod
- def _spec():
- description = """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container and
- apply 10.0xlog10(data/10xx-12) on the result. This
- computation can be incremental. If the input fields
- container is connected and the operator is run multiple
- times, the output field will be on all the connected
- inputs."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise sum over all the fields that have the same ID
+as the label set as input in the fields container and apply
+10.0xlog10(data/10xx-12) on the result. This computation can be
+incremental. If the input fields container is connected and the operator
+is run multiple times, the output field will be on all the connected
+inputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,14 +84,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="label",
type_names=["string"],
optional=True,
- document="""Label of the fields container where it should
- operate.""",
+ document=r"""Label of the fields container where it should operate.""",
),
},
map_output_pin_spec={
@@ -95,14 +98,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -111,31 +114,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="accumulate_level_over_label_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulateLevelOverLabelFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulateLevelOverLabelFc
+ inputs:
+ An instance of InputsAccumulateLevelOverLabelFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulateLevelOverLabelFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulateLevelOverLabelFc
+ outputs:
+ An instance of OutputsAccumulateLevelOverLabelFc.
"""
return super().outputs
@@ -166,12 +176,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._label)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,15 +195,15 @@ def fields_container(self):
return self._fields_container
@property
- def label(self):
- """Allows to connect label input to the operator.
+ def label(self) -> Input:
+ r"""Allows to connect label input to the operator.
- Label of the fields container where it should
- operate.
+ Label of the fields container where it should operate.
- Parameters
- ----------
- my_label : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,18 +236,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_level_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
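A minimal sketch of summing the fields that share a label value and applying the 10·log10 scaling described above, assuming a running DPF server; the "time" label is a hypothetical example and must exist in the container:

>>> from ansys.dpf import core as dpf
>>> fc = dpf.FieldsContainer()           # placeholder fields container
>>> op = dpf.operators.math.accumulate_level_over_label_fc(
...     fields_container=fc,
...     label="time",                    # hypothetical label name
... )
>>> level_field = op.outputs.field()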
diff --git a/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py
index 418622369e9..ebf37f5c548 100644
--- a/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py
+++ b/src/ansys/dpf/core/operators/math/accumulate_min_over_label_fc.py
@@ -4,30 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulate_min_over_label_fc(Operator):
- """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container and take its
- opposite. This computation can be incremental. If the input fields
- container is connected and the operator is run multiple times, the
- output field will be on all the connected inputs.
+ r"""Compute the component-wise sum over all the fields that have the same ID
+ as the label set as input in the fields container and take its opposite.
+ This computation can be incremental. If the input fields container is
+ connected and the operator is run multiple times, the output field will
+ be on all the connected inputs.
+
Parameters
----------
- fields_container : FieldsContainer
- label : str, optional
- Label of the fields container where it should
- operate.
+ fields_container: FieldsContainer
+ label: str, optional
+ Label of the fields container where it should operate.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -64,13 +68,13 @@ def __init__(self, fields_container=None, label=None, config=None, server=None):
self.inputs.label.connect(label)
@staticmethod
- def _spec():
- description = """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container and
- take its opposite. This computation can be incremental. If
- the input fields container is connected and the operator
- is run multiple times, the output field will be on all the
- connected inputs."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise sum over all the fields that have the same ID
+as the label set as input in the fields container and take its opposite.
+This computation can be incremental. If the input fields container is
+connected and the operator is run multiple times, the output field will
+be on all the connected inputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,14 +82,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="label",
type_names=["string"],
optional=True,
- document="""Label of the fields container where it should
- operate.""",
+ document=r"""Label of the fields container where it should operate.""",
),
},
map_output_pin_spec={
@@ -93,14 +96,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -109,31 +112,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="accumulate_min_over_label_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulateMinOverLabelFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulateMinOverLabelFc
+ inputs:
+ An instance of InputsAccumulateMinOverLabelFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulateMinOverLabelFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulateMinOverLabelFc
+ outputs:
+ An instance of OutputsAccumulateMinOverLabelFc.
"""
return super().outputs
@@ -164,12 +174,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._label)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -182,15 +193,15 @@ def fields_container(self):
return self._fields_container
@property
- def label(self):
- """Allows to connect label input to the operator.
+ def label(self) -> Input:
+ r"""Allows to connect label input to the operator.
- Label of the fields container where it should
- operate.
+ Label of the fields container where it should operate.
- Parameters
- ----------
- my_label : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,18 +232,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_min_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py b/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py
index bb3b0e96b4b..ddbe5c272aa 100644
--- a/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py
+++ b/src/ansys/dpf/core/operators/math/accumulate_over_label_fc.py
@@ -4,30 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulate_over_label_fc(Operator):
- """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container. This
- computation can be incremental. If the input fields container is
- connected and the operator is run multiple times, the output field
- will be on all the connected inputs.
+ r"""Compute the component-wise sum over all the fields that have the same ID
+ as the label set as input in the fields container. This computation can
+ be incremental. If the input fields container is connected and the
+ operator is run multiple times, the output field will be on all the
+ connected inputs.
+
Parameters
----------
- fields_container : FieldsContainer
- label : str, optional
- Label of the fields container where it should
- operate.
+ fields_container: FieldsContainer
+ label: str, optional
+ Label of the fields container where it should operate.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -62,13 +66,13 @@ def __init__(self, fields_container=None, label=None, config=None, server=None):
self.inputs.label.connect(label)
@staticmethod
- def _spec():
- description = """Compute the component-wise sum over all the fields that have the same
- ID as the label set as input in the fields container. This
- computation can be incremental. If the input fields
- container is connected and the operator is run multiple
- times, the output field will be on all the connected
- inputs."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise sum over all the fields that have the same ID
+as the label set as input in the fields container. This computation can
+be incremental. If the input fields container is connected and the
+operator is run multiple times, the output field will be on all the
+connected inputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,14 +80,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="label",
type_names=["string"],
optional=True,
- document="""Label of the fields container where it should
- operate.""",
+ document=r"""Label of the fields container where it should operate.""",
),
},
map_output_pin_spec={
@@ -91,14 +94,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -107,29 +110,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="accumulate_over_label_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulateOverLabelFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulateOverLabelFc
+ inputs:
+ An instance of InputsAccumulateOverLabelFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulateOverLabelFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulateOverLabelFc
+ outputs:
+ An instance of OutputsAccumulateOverLabelFc.
"""
return super().outputs
@@ -158,12 +168,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._label)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,15 +187,15 @@ def fields_container(self):
return self._fields_container
@property
- def label(self):
- """Allows to connect label input to the operator.
+ def label(self) -> Input:
+ r"""Allows to connect label input to the operator.
- Label of the fields container where it should
- operate.
+ Label of the fields container where it should operate.
- Parameters
- ----------
- my_label : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +226,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
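
For orientation, a minimal usage sketch of this operator in its updated form, following the docstring pattern above (the fields container is an empty placeholder and the label name "time" is only a hypothetical example):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulate_over_label_fc()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())
>>> op.inputs.label.connect("time")  # hypothetical label name to sum over
>>> summed = op.outputs.field()      # one field holding the component-wise sum
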
diff --git a/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py b/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py
index 6d651d00bbf..6947bf39555 100644
--- a/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py
+++ b/src/ansys/dpf/core/operators/math/accumulation_per_scoping.py
@@ -4,34 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accumulation_per_scoping(Operator):
- """This operator calculates the sum and the percentage of total sum of
- the input fields container for each scoping of the scopings
- container.
+ r"""This operator calculates the sum and the percentage of total sum of the
+ input fields container for each scoping of the scopings container.
+
Parameters
----------
- fields_container : FieldsContainer
- mesh_scoping : Scoping, optional
- Master scoping. all scopings in the scopings
- container will be intersected with
- this scoping.
- data_sources : DataSources
- scopings_container : ScopingsContainer
- The intersection between the of the first
- will be used.
+ fields_container: FieldsContainer
+ mesh_scoping: Scoping, optional
+ Master scoping. All scopings in the Scopings Container will be intersected with this scoping.
+ data_sources: DataSources
+ scopings_container: ScopingsContainer
+ The intersection between the of the first will be used.
Returns
-------
- accumulation_per_scoping : FieldsContainer
- accumulation_per_scoping_percentage : FieldsContainer
+ accumulation_per_scoping: FieldsContainer
+ accumulation_per_scoping_percentage: FieldsContainer
Examples
--------
@@ -85,10 +86,10 @@ def __init__(
self.inputs.scopings_container.connect(scopings_container)
@staticmethod
- def _spec():
- description = """This operator calculates the sum and the percentage of total sum of
- the input fields container for each scoping of the
- scopings container."""
+ def _spec() -> Specification:
+ description = r"""This operator calculates the sum and the percentage of total sum of the
+input fields container for each scoping of the scopings container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -96,28 +97,25 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Master scoping. all scopings in the scopings
- container will be intersected with
- this scoping.""",
+ document=r"""Master scoping. All scopings in the Scopings Container will be intersected with this scoping.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="scopings_container",
type_names=["scopings_container"],
optional=False,
- document="""The intersection between the of the first
- will be used.""",
+ document=r"""The intersection between the of the first will be used.""",
),
},
map_output_pin_spec={
@@ -125,20 +123,20 @@ def _spec():
name="accumulation_per_scoping",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="accumulation_per_scoping_percentage",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -147,29 +145,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="accumulation_per_scoping", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccumulationPerScoping:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccumulationPerScoping
+ inputs:
+ An instance of InputsAccumulationPerScoping.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccumulationPerScoping:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccumulationPerScoping
+ outputs:
+ An instance of OutputsAccumulationPerScoping.
"""
return super().outputs
@@ -212,12 +217,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scopings_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -230,16 +236,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Master scoping. all scopings in the scopings
- container will be intersected with
- this scoping.
+ Master scoping. All scopings in the Scopings Container will be intersected with this scoping.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,12 +257,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,15 +276,15 @@ def data_sources(self):
return self._data_sources
@property
- def scopings_container(self):
- """Allows to connect scopings_container input to the operator.
+ def scopings_container(self) -> Input:
+ r"""Allows to connect scopings_container input to the operator.
- The intersection between the of the first
- will be used.
+ The intersection between the of the first will be used.
- Parameters
- ----------
- my_scopings_container : ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -316,35 +322,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._accumulation_per_scoping_percentage)
@property
- def accumulation_per_scoping(self):
- """Allows to get accumulation_per_scoping output of the operator
+ def accumulation_per_scoping(self) -> Output:
+ r"""Allows to get accumulation_per_scoping output of the operator
Returns
- ----------
- my_accumulation_per_scoping : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulation_per_scoping()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_accumulation_per_scoping = op.outputs.accumulation_per_scoping()
- """ # noqa: E501
+ """
return self._accumulation_per_scoping
@property
- def accumulation_per_scoping_percentage(self):
- """Allows to get accumulation_per_scoping_percentage output of the operator
+ def accumulation_per_scoping_percentage(self) -> Output:
+ r"""Allows to get accumulation_per_scoping_percentage output of the operator
Returns
- ----------
- my_accumulation_per_scoping_percentage : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulation_per_scoping()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_accumulation_per_scoping_percentage = op.outputs.accumulation_per_scoping_percentage()
- """ # noqa: E501
+ """
return self._accumulation_per_scoping_percentage
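
A minimal sketch of driving this operator end to end, based on the docstring pattern above (all inputs are empty placeholders rather than real data):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.accumulation_per_scoping()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())
>>> op.inputs.data_sources.connect(dpf.DataSources())
>>> op.inputs.scopings_container.connect(dpf.ScopingsContainer())
>>> sums = op.outputs.accumulation_per_scoping()
>>> percentages = op.outputs.accumulation_per_scoping_percentage()
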
diff --git a/src/ansys/dpf/core/operators/math/add.py b/src/ansys/dpf/core/operators/math/add.py
index 560b738dd03..5aa099def5c 100644
--- a/src/ansys/dpf/core/operators/math/add.py
+++ b/src/ansys/dpf/core/operators/math/add.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add(Operator):
- """Computes the sum of two fields. If one field's scoping has 'overall'
- location, then these field's values are applied on the entire
- other field. If one of the input field is empty, the remaining is
- forwarded to the output. When using a constant or 'work_by_index',
- it's possible to use 'inplace' to reuse one of the fields.
+ r"""Computes the sum of two fields. If one field’s scoping has ‘overall’
+ location, then these field’s values are applied on the entire other
+ field. If one of the input field is empty, the remaining is forwarded to
+ the output. When using a constant or ‘work_by_index’, it’s possible to
+ use ‘inplace’ to reuse one of the fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -64,13 +67,13 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes the sum of two fields. If one field's scoping has 'overall'
- location, then these field's values are applied on the
- entire other field. If one of the input field is empty,
- the remaining is forwarded to the output. When using a
- constant or 'work_by_index', it's possible to use
- 'inplace' to reuse one of the fields."""
+ def _spec() -> Specification:
+ description = r"""Computes the sum of two fields. If one field’s scoping has ‘overall’
+location, then these field’s values are applied on the entire other
+field. If one of the input field is empty, the remaining is forwarded to
+the output. When using a constant or ‘work_by_index’, it’s possible to
+use ‘inplace’ to reuse one of the fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,8 +86,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
@@ -95,8 +97,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -104,14 +105,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +121,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="add", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAdd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAdd
+ inputs:
+ An instance of InputsAdd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAdd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAdd
+ outputs:
+ An instance of OutputsAdd.
"""
return super().outputs
@@ -169,15 +177,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -190,15 +198,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -229,18 +237,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
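
For orientation, a minimal sketch following the docstring pattern above (both fields are empty placeholders; per the pin specification, either pin also accepts a double or a vector of doubles as a constant addend):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add(fieldA=dpf.Field(), fieldB=dpf.Field())
>>> summed = op.outputs.field()
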
diff --git a/src/ansys/dpf/core/operators/math/add_constant.py b/src/ansys/dpf/core/operators/math/add_constant.py
index 10d6187c909..142bc144a27 100644
--- a/src/ansys/dpf/core/operators/math/add_constant.py
+++ b/src/ansys/dpf/core/operators/math/add_constant.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add_constant(Operator):
- """Computes the sum of a field (in 0) and a scalar (in 1).
+ r"""Computes the sum of a field (in 0) and a scalar (in 1).
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- ponderation : float
- Double or vector of double
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ weights: float
+ double or vector of double
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -36,31 +40,41 @@ class add_constant(Operator):
>>> # Make input connections
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.add_constant(
... field=my_field,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... )
>>> # Get output data
>>> result_field = op.outputs.field()
"""
- def __init__(self, field=None, ponderation=None, config=None, server=None):
+ def __init__(
+ self,
+ field=None,
+ weights=None,
+ config=None,
+ server=None,
+ ponderation=None,
+ ):
super().__init__(name="add_constant", config=config, server=server)
self._inputs = InputsAddConstant(self)
self._outputs = OutputsAddConstant(self)
if field is not None:
self.inputs.field.connect(field)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
@staticmethod
- def _spec():
- description = """Computes the sum of a field (in 0) and a scalar (in 1)."""
+ def _spec() -> Specification:
+ description = r"""Computes the sum of a field (in 0) and a scalar (in 1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -68,14 +82,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["double", "vector"],
optional=False,
- document="""Double or vector of double""",
+ document=r"""double or vector of double""",
+ aliases=["ponderation"],
),
},
map_output_pin_spec={
@@ -83,14 +97,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +113,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="add_constant", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAddConstant:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAddConstant
+ inputs:
+ An instance of InputsAddConstant.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAddConstant:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAddConstant
+ outputs:
+ An instance of OutputsAddConstant.
"""
return super().outputs
@@ -136,27 +157,27 @@ class InputsAddConstant(_Inputs):
>>> op = dpf.operators.math.add_constant()
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
"""
def __init__(self, op: Operator):
super().__init__(add_constant._spec().inputs, op)
self._field = Input(add_constant._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._field)
- self._ponderation = Input(add_constant._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(add_constant._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -169,24 +190,37 @@ def field(self):
return self._field
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Double or vector of double
+ double or vector of double
- Parameters
- ----------
- my_ponderation : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_constant()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
+
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator add_constant: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
class OutputsAddConstant(_Outputs):
@@ -207,18 +241,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_constant()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
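
A short sketch of how the renamed pin behaves after this change, following the docstring examples above (the field is an empty placeholder): the supported path is the new weights name, while the removed ponderation attribute is resolved by the __getattr__ shim, which emits a DeprecationWarning and forwards to weights.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_constant()
>>> op.inputs.field.connect(dpf.Field())
>>> op.inputs.weights.connect(2.0)        # new pin name
>>> op.inputs.ponderation.connect(2.0)    # deprecated alias: warns, then maps to weights
>>> result = op.outputs.field()
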
diff --git a/src/ansys/dpf/core/operators/math/add_constant_fc.py b/src/ansys/dpf/core/operators/math/add_constant_fc.py
index c614f231c5e..c1992050131 100644
--- a/src/ansys/dpf/core/operators/math/add_constant_fc.py
+++ b/src/ansys/dpf/core/operators/math/add_constant_fc.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add_constant_fc(Operator):
- """Computes the sum of a field (in 0) and a scalar (in 1).
+ r"""Computes the sum of a field (in 0) and a scalar (in 1).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- ponderation : float
- Double or vector of double
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ weights: float
+ double or vector of double
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -36,13 +40,13 @@ class add_constant_fc(Operator):
>>> # Make input connections
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.add_constant_fc(
... fields_container=my_fields_container,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... )
>>> # Get output data
@@ -50,19 +54,27 @@ class add_constant_fc(Operator):
"""
def __init__(
- self, fields_container=None, ponderation=None, config=None, server=None
+ self,
+ fields_container=None,
+ weights=None,
+ config=None,
+ server=None,
+ ponderation=None,
):
super().__init__(name="add_constant_fc", config=config, server=server)
self._inputs = InputsAddConstantFc(self)
self._outputs = OutputsAddConstantFc(self)
if fields_container is not None:
self.inputs.fields_container.connect(fields_container)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
@staticmethod
- def _spec():
- description = """Computes the sum of a field (in 0) and a scalar (in 1)."""
+ def _spec() -> Specification:
+ description = r"""Computes the sum of a field (in 0) and a scalar (in 1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +82,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["double", "vector"],
optional=False,
- document="""Double or vector of double""",
+ document=r"""double or vector of double""",
+ aliases=["ponderation"],
),
},
map_output_pin_spec={
@@ -85,14 +97,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +113,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="add_constant_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAddConstantFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAddConstantFc
+ inputs:
+ An instance of InputsAddConstantFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAddConstantFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAddConstantFc
+ outputs:
+ An instance of OutputsAddConstantFc.
"""
return super().outputs
@@ -138,27 +157,27 @@ class InputsAddConstantFc(_Inputs):
>>> op = dpf.operators.math.add_constant_fc()
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
"""
def __init__(self, op: Operator):
super().__init__(add_constant_fc._spec().inputs, op)
self._fields_container = Input(add_constant_fc._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fields_container)
- self._ponderation = Input(add_constant_fc._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(add_constant_fc._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,24 +190,37 @@ def fields_container(self):
return self._fields_container
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Double or vector of double
+ double or vector of double
- Parameters
- ----------
- my_ponderation : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_constant_fc()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
+
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator add_constant_fc: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
class OutputsAddConstantFc(_Outputs):
@@ -209,18 +241,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_constant_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
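
The constructor keeps backward compatibility as well: a trailing ponderation keyword argument is still accepted and routed to the weights pin (no DeprecationWarning is raised on this path, per the __init__ shown above), as sketched below with empty placeholder inputs:

>>> from ansys.dpf import core as dpf
>>> op_new = dpf.operators.math.add_constant_fc(
...     fields_container=dpf.FieldsContainer(), weights=1.5
... )
>>> op_old = dpf.operators.math.add_constant_fc(
...     fields_container=dpf.FieldsContainer(), ponderation=1.5
... )  # legacy keyword, forwarded to the weights pin
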
diff --git a/src/ansys/dpf/core/operators/math/add_fc.py b/src/ansys/dpf/core/operators/math/add_fc.py
index 36553d8a13b..7082846e4f4 100644
--- a/src/ansys/dpf/core/operators/math/add_fc.py
+++ b/src/ansys/dpf/core/operators/math/add_fc.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add_fc(Operator):
- """Selects all fields with the same label space in the input fields
- container and add those together. If fields, doubles, or vectors
- of doubles, are put in input they are added to all the fields.
+ r"""Selects all fields with the same label space in the input fields
+ container and add those together. If fields, doubles, or vectors of
+ doubles, are put in input they are added to all the fields.
+
Parameters
----------
- fields_container1 : FieldsContainer or Field or float
- fields_container2 : FieldsContainer or Field or float
+ fields_container1: FieldsContainer or Field or float
+ fields_container2: FieldsContainer or Field or float
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -60,11 +65,11 @@ def __init__(
self.inputs.fields_container2.connect(fields_container2)
@staticmethod
- def _spec():
- description = """Selects all fields with the same label space in the input fields
- container and add those together. If fields, doubles, or
- vectors of doubles, are put in input they are added to all
- the fields."""
+ def _spec() -> Specification:
+ description = r"""Selects all fields with the same label space in the input fields
+container and add those together. If fields, doubles, or vectors of
+doubles, are put in input they are added to all the fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -77,7 +82,7 @@ def _spec():
"vector",
],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_container",
@@ -88,7 +93,7 @@ def _spec():
"vector",
],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -96,14 +101,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +117,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="add_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAddFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAddFc
+ inputs:
+ An instance of InputsAddFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAddFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAddFc
+ outputs:
+ An instance of OutputsAddFc.
"""
return super().outputs
@@ -161,12 +173,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container2)
@property
- def fields_container1(self):
- """Allows to connect fields_container1 input to the operator.
+ def fields_container1(self) -> Input:
+ r"""Allows to connect fields_container1 input to the operator.
- Parameters
- ----------
- my_fields_container1 : FieldsContainer or Field or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -179,12 +192,13 @@ def fields_container1(self):
return self._fields_container1
@property
- def fields_container2(self):
- """Allows to connect fields_container2 input to the operator.
+ def fields_container2(self) -> Input:
+ r"""Allows to connect fields_container2 input to the operator.
- Parameters
- ----------
- my_fields_container2 : FieldsContainer or Field or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +229,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
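
A minimal sketch matching the description above (empty placeholder container; per the pin specification, a double connected on the second pin is added to all fields of the first):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.add_fc()
>>> op.inputs.fields_container1.connect(dpf.FieldsContainer())
>>> op.inputs.fields_container2.connect(3.0)  # a constant added to every field
>>> result = op.outputs.fields_container()
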
diff --git a/src/ansys/dpf/core/operators/math/amplitude.py b/src/ansys/dpf/core/operators/math/amplitude.py
index 8ba8df3f15e..e76bb5d2a71 100644
--- a/src/ansys/dpf/core/operators/math/amplitude.py
+++ b/src/ansys/dpf/core/operators/math/amplitude.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class amplitude(Operator):
- """Computes amplitude of a real and an imaginary field.
+ r"""Computes amplitude of a real and an imaginary field.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -60,8 +63,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes amplitude of a real and an imaginary field."""
+ def _spec() -> Specification:
+ description = r"""Computes amplitude of a real and an imaginary field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,15 +73,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -85,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="amplitude", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAmplitude:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAmplitude
+ inputs:
+ An instance of InputsAmplitude.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAmplitude:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAmplitude
+ outputs:
+ An instance of OutputsAmplitude.
"""
return super().outputs
@@ -150,15 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,15 +180,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.amplitude()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
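
For orientation, a minimal sketch based on the docstring pattern above (both fields are empty placeholders; fieldA and fieldB hold the real and imaginary parts, and the output is presumably the component-wise magnitude sqrt(fieldA**2 + fieldB**2)):

>>> from ansys.dpf import core as dpf
>>> real_part = dpf.Field()   # placeholder for the real part
>>> imag_part = dpf.Field()   # placeholder for the imaginary part
>>> op = dpf.operators.math.amplitude(fieldA=real_part, fieldB=imag_part)
>>> amp = op.outputs.field()
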
diff --git a/src/ansys/dpf/core/operators/math/amplitude_fc.py b/src/ansys/dpf/core/operators/math/amplitude_fc.py
index 846248e3208..dcc6400363a 100644
--- a/src/ansys/dpf/core/operators/math/amplitude_fc.py
+++ b/src/ansys/dpf/core/operators/math/amplitude_fc.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class amplitude_fc(Operator):
- """Computes the amplitude of a real and an imaginary field.
+ r"""Computes the amplitude of a real and an imaginary field.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the amplitude of a real and an imaginary field."""
+ def _spec() -> Specification:
+ description = r"""Computes the amplitude of a real and an imaginary field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="amplitude_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAmplitudeFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAmplitudeFc
+ inputs:
+ An instance of InputsAmplitudeFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAmplitudeFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAmplitudeFc
+ outputs:
+ An instance of OutputsAmplitudeFc.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.amplitude_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/average_over_label_fc.py b/src/ansys/dpf/core/operators/math/average_over_label_fc.py
index 50247d5a853..2a88b46a422 100644
--- a/src/ansys/dpf/core/operators/math/average_over_label_fc.py
+++ b/src/ansys/dpf/core/operators/math/average_over_label_fc.py
@@ -4,30 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class average_over_label_fc(Operator):
- """Compute the component-wise average over all the fields that have the
+ r"""Compute the component-wise average over all the fields that have the
same ID as the label set as input in the fields container. This
computation can be incremental. If the input fields container is
- connected and the operator is run multiple times, the output field
- will be on all the connected inputs.
+ connected and the operator is run multiple times, the output field will
+ be on all the connected inputs.
+
Parameters
----------
- fields_container : FieldsContainer
- label : str, optional
- Label of the fields container where it should
- operate.
+ fields_container: FieldsContainer
+ label: str, optional
+ Label of the fields container where it should operate.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -62,13 +66,13 @@ def __init__(self, fields_container=None, label=None, config=None, server=None):
self.inputs.label.connect(label)
@staticmethod
- def _spec():
- description = """Compute the component-wise average over all the fields that have the
- same ID as the label set as input in the fields container.
- This computation can be incremental. If the input fields
- container is connected and the operator is run multiple
- times, the output field will be on all the connected
- inputs."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise average over all the fields that have the
+same ID as the label set as input in the fields container. This
+computation can be incremental. If the input fields container is
+connected and the operator is run multiple times, the output field will
+be on all the connected inputs.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,14 +80,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="label",
type_names=["string"],
optional=True,
- document="""Label of the fields container where it should
- operate.""",
+ document=r"""Label of the fields container where it should operate.""",
),
},
map_output_pin_spec={
@@ -91,14 +94,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -107,29 +110,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="average_over_label_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAverageOverLabelFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAverageOverLabelFc
+ inputs:
+ An instance of InputsAverageOverLabelFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAverageOverLabelFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAverageOverLabelFc
+ outputs:
+ An instance of OutputsAverageOverLabelFc.
"""
return super().outputs
@@ -158,12 +168,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._label)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,15 +187,15 @@ def fields_container(self):
return self._fields_container
@property
- def label(self):
- """Allows to connect label input to the operator.
+ def label(self) -> Input:
+ r"""Allows to connect label input to the operator.
- Label of the fields container where it should
- operate.
+ Label of the fields container where it should operate.
- Parameters
- ----------
- my_label : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +226,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.average_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
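
A minimal usage sketch for math.average_over_label_fc as specified above, assuming a running DPF server; the empty FieldsContainer and the "time" label are placeholders for data that would normally come from a results provider:

from ansys.dpf import core as dpf

# Placeholder input; in practice this comes from a result operator.
my_fields_container = dpf.FieldsContainer()

op = dpf.operators.math.average_over_label_fc()
op.inputs.fields_container.connect(my_fields_container)
op.inputs.label.connect("time")  # optional pin 1: label whose fields are averaged
result_field = op.outputs.field()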
diff --git a/src/ansys/dpf/core/operators/math/centroid.py b/src/ansys/dpf/core/operators/math/centroid.py
index 61b977e7424..6d4fa6c8a4e 100644
--- a/src/ansys/dpf/core/operators/math/centroid.py
+++ b/src/ansys/dpf/core/operators/math/centroid.py
@@ -4,31 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class centroid(Operator):
- """Computes centroid of field1 and field2, using fieldOut =
- field1*(1.-fact)+field2*(fact). Only works by index.
+ r"""Computes centroid of field1 and field2, using fieldOut =
+ field1\ *(1.-fact)+field2*\ (fact). Only works by index.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- factor : float
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
+ factor: float
Scalar
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -68,9 +71,10 @@ def __init__(self, fieldA=None, fieldB=None, factor=None, config=None, server=No
self.inputs.factor.connect(factor)
@staticmethod
- def _spec():
- description = """Computes centroid of field1 and field2, using fieldOut =
- field1*(1.-fact)+field2*(fact). Only works by index."""
+ def _spec() -> Specification:
+ description = r"""Computes centroid of field1 and field2, using fieldOut =
+field1\ *(1.-fact)+field2*\ (fact). Only works by index.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,21 +82,19 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
2: PinSpecification(
name="factor",
type_names=["double"],
optional=False,
- document="""Scalar""",
+ document=r"""Scalar""",
),
},
map_output_pin_spec={
@@ -100,14 +102,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="centroid", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCentroid:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCentroid
+ inputs:
+ An instance of InputsCentroid.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCentroid:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCentroid
+ outputs:
+ An instance of OutputsCentroid.
"""
return super().outputs
@@ -169,15 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._factor)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -190,15 +199,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +220,15 @@ def fieldB(self):
return self._fieldB
@property
- def factor(self):
- """Allows to connect factor input to the operator.
+ def factor(self) -> Input:
+ r"""Allows to connect factor input to the operator.
Scalar
- Parameters
- ----------
- my_factor : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,18 +259,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.centroid()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
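
A short sketch of math.centroid under the same assumptions (running server, placeholder fields); with factor=0.5 the output is the index-wise midpoint of the two fields:

from ansys.dpf import core as dpf

my_fieldA = dpf.Field()  # placeholder; both fields must share the same indexing
my_fieldB = dpf.Field()  # placeholder

op = dpf.operators.math.centroid(fieldA=my_fieldA, fieldB=my_fieldB, factor=0.5)
result_field = op.outputs.field()  # fieldA*(1.-factor) + fieldB*factor, by index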
diff --git a/src/ansys/dpf/core/operators/math/centroid_fc.py b/src/ansys/dpf/core/operators/math/centroid_fc.py
index d6bf84a8cdb..025ec4706c4 100644
--- a/src/ansys/dpf/core/operators/math/centroid_fc.py
+++ b/src/ansys/dpf/core/operators/math/centroid_fc.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class centroid_fc(Operator):
- """Computes the centroid of all the matching fields of a fields container
+ r"""Computes the centroid of all the matching fields of a fields container
at a given time or frequency, using fieldOut =
- field1*(1.-fact)+field2*(fact).
+ field1\ *(1.-fact)+field2*\ (fact).
+
Parameters
----------
- fields_container : FieldsContainer
- time_freq : float
- step : int, optional
- time_freq_support : TimeFreqSupport, optional
+ fields_container: FieldsContainer
+ time_freq: float
+ step: int, optional
+ time_freq_support: TimeFreqSupport, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -78,10 +83,11 @@ def __init__(
self.inputs.time_freq_support.connect(time_freq_support)
@staticmethod
- def _spec():
- description = """Computes the centroid of all the matching fields of a fields container
- at a given time or frequency, using fieldOut =
- field1*(1.-fact)+field2*(fact)."""
+ def _spec() -> Specification:
+ description = r"""Computes the centroid of all the matching fields of a fields container
+at a given time or frequency, using fieldOut =
+field1\ *(1.-fact)+field2*\ (fact).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,25 +95,25 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="time_freq",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="step",
type_names=["int32"],
optional=True,
- document="""""",
+ document=r"""""",
),
8: PinSpecification(
name="time_freq_support",
type_names=["time_freq_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -115,14 +121,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -131,29 +137,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="centroid_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCentroidFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCentroidFc
+ inputs:
+ An instance of InputsCentroidFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCentroidFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCentroidFc
+ outputs:
+ An instance of OutputsCentroidFc.
"""
return super().outputs
@@ -188,12 +201,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._time_freq_support)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,12 +220,13 @@ def fields_container(self):
return self._fields_container
@property
- def time_freq(self):
- """Allows to connect time_freq input to the operator.
+ def time_freq(self) -> Input:
+ r"""Allows to connect time_freq input to the operator.
- Parameters
- ----------
- my_time_freq : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -224,12 +239,13 @@ def time_freq(self):
return self._time_freq
@property
- def step(self):
- """Allows to connect step input to the operator.
+ def step(self) -> Input:
+ r"""Allows to connect step input to the operator.
- Parameters
- ----------
- my_step : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -242,12 +258,13 @@ def step(self):
return self._step
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -278,18 +295,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.centroid_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
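
For math.centroid_fc, the extra pins select the time or frequency at which the matching fields are blended; the container and the value 0.05 below are placeholders:

from ansys.dpf import core as dpf

my_fields_container = dpf.FieldsContainer()  # placeholder transient result

op = dpf.operators.math.centroid_fc()
op.inputs.fields_container.connect(my_fields_container)
op.inputs.time_freq.connect(0.05)  # time or frequency to interpolate at
result_fields_container = op.outputs.fields_container()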
diff --git a/src/ansys/dpf/core/operators/math/component_wise_divide.py b/src/ansys/dpf/core/operators/math/component_wise_divide.py
index 428ec0d3b31..6ce6e6a7cb2 100644
--- a/src/ansys/dpf/core/operators/math/component_wise_divide.py
+++ b/src/ansys/dpf/core/operators/math/component_wise_divide.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_wise_divide(Operator):
- """Computes component-wise fraction between two fields of same
- dimensionality. If one field's scoping has an 'overall' location,
- then this field's values are applied on the other field entirely.
- When using a constant or 'work_by_index', you can use 'inplace' to
- reuse one of the fields.
+ r"""Computes component-wise fraction between two fields of same
+ dimensionality. If one field’s scoping has an ‘overall’ location, then
+ this field’s values are applied on the other field entirely. When using
+ a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+ fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -64,13 +67,13 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes component-wise fraction between two fields of same
- dimensionality. If one field's scoping has an 'overall'
- location, then this field's values are applied on the
- other field entirely. When using a constant or
- 'work_by_index', you can use 'inplace' to reuse one of the
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes component-wise fraction between two fields of same
+dimensionality. If one field’s scoping has an ‘overall’ location, then
+this field’s values are applied on the other field entirely. When using
+a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,15 +81,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -94,14 +95,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -110,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_wise_divide", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentWiseDivide:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentWiseDivide
+ inputs:
+ An instance of InputsComponentWiseDivide.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentWiseDivide:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentWiseDivide
+ outputs:
+ An instance of OutputsComponentWiseDivide.
"""
return super().outputs
@@ -159,15 +167,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -180,15 +188,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -219,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.component_wise_divide()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
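
A sketch of math.component_wise_divide with the connect-style API; both fields are placeholders and are expected to have the same dimensionality (or one of them an 'overall' scoping):

from ansys.dpf import core as dpf

my_fieldA = dpf.Field()  # numerator (placeholder)
my_fieldB = dpf.Field()  # denominator (placeholder)

op = dpf.operators.math.component_wise_divide()
op.inputs.fieldA.connect(my_fieldA)
op.inputs.fieldB.connect(my_fieldB)
result_field = op.outputs.field()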
diff --git a/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py b/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py
index 8186cfe1eed..270f8335d25 100644
--- a/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py
+++ b/src/ansys/dpf/core/operators/math/component_wise_divide_fc.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_wise_divide_fc(Operator):
- """For every two fields with the same label space (from the two input
- fields containers), computes component-wise fraction between two
- fields of same dimensionality. If one field's scoping has an
- 'overall' location, then this field's values are applied on the
- other field entirely.
+ r"""For every two fields with the same label space (from the two input
+ fields containers), computes component-wise fraction between two fields
+ of same dimensionality. If one field’s scoping has an ‘overall’
+ location, then this field’s values are applied on the other field
+ entirely.
+
Parameters
----------
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -62,12 +67,13 @@ def __init__(
self.inputs.fields_containerB.connect(fields_containerB)
@staticmethod
- def _spec():
- description = """For every two fields with the same label space (from the two input
- fields containers), computes component-wise fraction
- between two fields of same dimensionality. If one field's
- scoping has an 'overall' location, then this field's
- values are applied on the other field entirely."""
+ def _spec() -> Specification:
+ description = r"""For every two fields with the same label space (from the two input
+fields containers), computes component-wise fraction between two fields
+of same dimensionality. If one field’s scoping has an ‘overall’
+location, then this field’s values are applied on the other field
+entirely.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -75,13 +81,13 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -89,14 +95,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -105,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_wise_divide_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentWiseDivideFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentWiseDivideFc
+ inputs:
+ An instance of InputsComponentWiseDivideFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentWiseDivideFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentWiseDivideFc
+ outputs:
+ An instance of OutputsComponentWiseDivideFc.
"""
return super().outputs
@@ -158,12 +171,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerB)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,12 +190,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,18 +229,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.component_wise_divide_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
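
The fields-container variant pairs fields by identical label space before dividing; both containers below are placeholders:

from ansys.dpf import core as dpf

my_fields_containerA = dpf.FieldsContainer()  # placeholder
my_fields_containerB = dpf.FieldsContainer()  # placeholder

op = dpf.operators.math.component_wise_divide_fc(
    fields_containerA=my_fields_containerA,
    fields_containerB=my_fields_containerB,
)
result_fields_container = op.outputs.fields_container()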
diff --git a/src/ansys/dpf/core/operators/math/component_wise_product.py b/src/ansys/dpf/core/operators/math/component_wise_product.py
index 70f703c3499..d9e2ee66f00 100644
--- a/src/ansys/dpf/core/operators/math/component_wise_product.py
+++ b/src/ansys/dpf/core/operators/math/component_wise_product.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_wise_product(Operator):
- """Computes component-wise product between two fields of same
- dimensionality. If one field's scoping has an 'overall' location,
- then this field's values are applied on the other field entirely.
- When using a constant or 'work_by_index', you can use 'inplace' to
- reuse one of the fields.
+ r"""Computes component-wise product between two fields of same
+ dimensionality. If one field’s scoping has an ‘overall’ location, then
+ this field’s values are applied on the other field entirely. When using
+ a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+ fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -64,13 +67,13 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes component-wise product between two fields of same
- dimensionality. If one field's scoping has an 'overall'
- location, then this field's values are applied on the
- other field entirely. When using a constant or
- 'work_by_index', you can use 'inplace' to reuse one of the
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes component-wise product between two fields of same
+dimensionality. If one field’s scoping has an ‘overall’ location, then
+this field’s values are applied on the other field entirely. When using
+a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,15 +81,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -94,14 +95,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -110,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_wise_product", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentWiseProduct:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentWiseProduct
+ inputs:
+ An instance of InputsComponentWiseProduct.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentWiseProduct:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentWiseProduct
+ outputs:
+ An instance of OutputsComponentWiseProduct.
"""
return super().outputs
@@ -159,15 +167,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -180,15 +188,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -219,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.component_wise_product()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
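
math.component_wise_product follows the same pattern as the division operator; one placeholder field per pin:

from ansys.dpf import core as dpf

my_fieldA = dpf.Field()  # placeholder
my_fieldB = dpf.Field()  # placeholder

op = dpf.operators.math.component_wise_product(fieldA=my_fieldA, fieldB=my_fieldB)
result_field = op.outputs.field()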
diff --git a/src/ansys/dpf/core/operators/math/component_wise_product_fc.py b/src/ansys/dpf/core/operators/math/component_wise_product_fc.py
index 1ded10b300b..d76ac990bc7 100644
--- a/src/ansys/dpf/core/operators/math/component_wise_product_fc.py
+++ b/src/ansys/dpf/core/operators/math/component_wise_product_fc.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class component_wise_product_fc(Operator):
- """Computes component-wise product between two fields of same
- dimensionality. If one field's scoping has an 'overall' location,
- then this field's values are applied on the other field entirely.
- When using a constant or 'work_by_index', you can use 'inplace' to
- reuse one of the fields.
+ r"""Computes component-wise product between two fields of same
+ dimensionality. If one field’s scoping has an ‘overall’ location, then
+ this field’s values are applied on the other field entirely. When using
+ a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+ fields.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -64,13 +67,13 @@ def __init__(self, fields_container=None, fieldB=None, config=None, server=None)
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes component-wise product between two fields of same
- dimensionality. If one field's scoping has an 'overall'
- location, then this field's values are applied on the
- other field entirely. When using a constant or
- 'work_by_index', you can use 'inplace' to reuse one of the
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes component-wise product between two fields of same
+dimensionality. If one field’s scoping has an ‘overall’ location, then
+this field’s values are applied on the other field entirely. When using
+a constant or ‘work_by_index’, you can use ‘inplace’ to reuse one of the
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,15 +81,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -94,14 +95,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -110,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="component_wise_product_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComponentWiseProductFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComponentWiseProductFc
+ inputs:
+ An instance of InputsComponentWiseProductFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComponentWiseProductFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComponentWiseProductFc
+ outputs:
+ An instance of OutputsComponentWiseProductFc.
"""
return super().outputs
@@ -161,15 +169,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -182,15 +190,15 @@ def fields_container(self):
return self._fields_container
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,18 +231,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.component_wise_product_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
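
In math.component_wise_product_fc, pin 0 takes a fields container and pin 1 a field (or fields container), so a single weighting field can scale every field in the container; both inputs below are placeholders:

from ansys.dpf import core as dpf

my_fields_container = dpf.FieldsContainer()  # placeholder container (pin 0)
my_fieldB = dpf.Field()                      # placeholder weighting field (pin 1)

op = dpf.operators.math.component_wise_product_fc(
    fields_container=my_fields_container,
    fieldB=my_fieldB,
)
result_fields_container = op.outputs.fields_container()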
diff --git a/src/ansys/dpf/core/operators/math/compute_residual_and_error.py b/src/ansys/dpf/core/operators/math/compute_residual_and_error.py
index 83e65eeef49..a08dbbee364 100644
--- a/src/ansys/dpf/core/operators/math/compute_residual_and_error.py
+++ b/src/ansys/dpf/core/operators/math/compute_residual_and_error.py
@@ -4,72 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_residual_and_error(Operator):
- """Computes the Lp-norm of a field or a field container.
- When a second entry is provided, the residual (the difference
- between the first and second entry) is calculated along with the
- error as the Lp-norm of the difference.
- When a second input is not provided, the calculation is only
- completed for the first entry.
- The type of calculation performed is based on the specifications
- provided for pin 1, pin 2 defines the type of error norm (L1 vs
- L2), and
- pin 3 which entity to use as a reference
+ r"""Computes the Lp-norm of a field or a field container. When a second
+ entry is provided, the residual (the difference between the first and
+ second entry) is calculated along with the error as the Lp-norm of the
+ difference. When a second input is not provided, the calculation is only
+ completed for the first entry. The type of calculation performed is
+ based on the specifications provided for pin 1; pin 2 defines the type
+ of error norm (L1 vs L2), and pin 3 defines which entity to use as a reference
+
Parameters
----------
- field_or_fields_container1 : Field or FieldsContainer
- Field or fields container - compulsory
- normalization_type : int, optional
- Type of normalization applied to the
- residuals and norm calculation
- (optional, defaut: absolute):
- 0 for absolute,
- 1 for relative to the first entry at
- a given time step,
- 2 for normalized by the max at a
- given time step of the first entry or
- residuals depending on the reference
- field option,
- 3 for normalized by the max over all
- time steps of the first entry or
- residuals depending on the reference
- field option
- norm_calculation_type : int, optional
- Type for norm calculation (optional, default:
- l2) - it is normalized depending on
- pin2 selection
- 1 for l1, ie sum(abs(xi)),
- 2 for l2, ie sqrt(sum((xi^2))
- field_reference : int, optional
- Field reference for the normalization step,
- default: 0 for entry 1, 1 for
- residuals - optional
- field_or_fields_container2 : Field or FieldsContainer, optional
- Field or fields container of same
- dimensionality as entry 1 - optional
+ field_or_fields_container1: Field or FieldsContainer
+ field or fields container - compulsory
+ normalization_type: int, optional
+ type of normalization applied to the residuals and norm calculation (optional, default: absolute):
+ 0 for absolute,
+ 1 for relative to the first entry at a given time step,
+ 2 for normalized by the max at a given time step of the first entry or residuals depending on the reference field option,
+ 3 for normalized by the max over all time steps of the first entry or residuals depending on the reference field option
+ norm_calculation_type: int, optional
+ type for norm calculation (optional, default: L2) - It is normalized depending on Pin2 selection
+ 1 for L1, i.e. sum(abs(xi)),
+ 2 for L2, i.e. sqrt(sum(xi^2))
+ field_reference: int, optional
+ Field reference for the normalization step, default: 0 for entry 1, 1 for residuals - optional
+ field_or_fields_container2: Field or FieldsContainer, optional
+ field or fields container of same dimensionality as entry 1 - optional
Returns
-------
- residuals : Field or FieldsContainer
- 0: normalized residuals (aka field 1 - field
- 2) as a field or field container,
- normalized depending on the
- normalization type
- error : Field or FieldsContainer
- 1: error as a field or a field container
- depending on the entry's type.
- residuals_normalization_factor : Field or FieldsContainer
+ residuals: Field or FieldsContainer
+ 0: normalized residuals (aka field 1 - field 2) as a field or field container, normalized depending on the normalization type
+ error: Field or FieldsContainer
+ 1: error as a field or a field container depending on the entry's type.
+ residuals_normalization_factor: Field or FieldsContainer
2: factor used for residual normalization
- error_normalization_factor : Field or FieldsContainer
+ error_normalization_factor: Field or FieldsContainer
3: factor used for error norm normalization
Examples
@@ -132,18 +116,15 @@ def __init__(
self.inputs.field_or_fields_container2.connect(field_or_fields_container2)
@staticmethod
- def _spec():
- description = """Computes the Lp-norm of a field or a field container.
- When a second entry is provided, the residual (the
- difference between the first and second entry) is
- calculated along with the error as the Lp-norm of the
- difference.
- When a second input is not provided, the calculation is
- only completed for the first entry.
- The type of calculation performed is based on the
- specifications provided for pin 1, pin 2 defines the type
- of error norm (L1 vs L2), and
- pin 3 which entity to use as a reference"""
+ def _spec() -> Specification:
+ description = r"""Computes the Lp-norm of a field or a field container. When a second
+entry is provided, the residual (the difference between the first and
+second entry) is calculated along with the error as the Lp-norm of the
+difference. When a second input is not provided, the calculation is only
+completed for the first entry. The type of calculation performed is
+based on the specifications provided for pin 1; pin 2 defines the type
+of error norm (L1 vs L2), and pin 3 defines which entity to use as a reference
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,51 +132,37 @@ def _spec():
name="field_or_fields_container1",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container - compulsory""",
+ document=r"""field or fields container - compulsory""",
),
1: PinSpecification(
name="normalization_type",
type_names=["int32"],
optional=True,
- document="""Type of normalization applied to the
- residuals and norm calculation
- (optional, defaut: absolute):
- 0 for absolute,
- 1 for relative to the first entry at
- a given time step,
- 2 for normalized by the max at a
- given time step of the first entry or
- residuals depending on the reference
- field option,
- 3 for normalized by the max over all
- time steps of the first entry or
- residuals depending on the reference
- field option""",
+ document=r"""type of normalization applied to the residuals and norm calculation (optional, defaut: absolute):
+ 0 for absolute,
+ 1 for relative to the first entry at a given time step,
+ 2 for normalized by the max at a given time step of the first entry or residuals depending on the reference field option,
+ 3 for normalized by the max over all time steps of the first entry or residuals depending on the reference field option""",
),
2: PinSpecification(
name="norm_calculation_type",
type_names=["int32"],
optional=True,
- document="""Type for norm calculation (optional, default:
- l2) - it is normalized depending on
- pin2 selection
- 1 for l1, ie sum(abs(xi)),
- 2 for l2, ie sqrt(sum((xi^2))""",
+ document=r"""type for norm calculation (optional, default: L2) - It is normalized depending on Pin2 selection
+ 1 for L1, i.e. sum(abs(xi)),
+ 2 for L2, i.e. sqrt(sum(xi^2))""",
),
3: PinSpecification(
name="field_reference",
type_names=["int32"],
optional=True,
- document="""Field reference for the normalization step,
- default: 0 for entry 1, 1 for
- residuals - optional""",
+ document=r"""Field reference for the normalization step, default: 0 for entry 1, 1 for residuals - optional""",
),
4: PinSpecification(
name="field_or_fields_container2",
type_names=["field", "fields_container"],
optional=True,
- document="""Field or fields container of same
- dimensionality as entry 1 - optional""",
+ document=r"""field or fields container of same dimensionality as entry 1 - optional""",
),
},
map_output_pin_spec={
@@ -203,36 +170,32 @@ def _spec():
name="residuals",
type_names=["field", "fields_container"],
optional=False,
- document="""0: normalized residuals (aka field 1 - field
- 2) as a field or field container,
- normalized depending on the
- normalization type""",
+ document=r"""0: normalized residuals (aka field 1 - field 2) as a field or field container, normalized depending on the normalization type""",
),
1: PinSpecification(
name="error",
type_names=["field", "fields_container"],
optional=False,
- document="""1: error as a field or a field container
- depending on the entry's type.""",
+ document=r"""1: error as a field or a field container depending on the entry's type.""",
),
2: PinSpecification(
name="residuals_normalization_factor",
type_names=["field", "fields_container"],
optional=False,
- document="""2: factor used for residual normalization""",
+ document=r"""2: factor used for residual normalization""",
),
3: PinSpecification(
name="error_normalization_factor",
type_names=["field", "fields_container"],
optional=False,
- document="""3: factor used for error norm normalization""",
+ document=r"""3: factor used for error norm normalization""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -241,29 +204,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="error_norm_calc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeResidualAndError:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeResidualAndError
+ inputs:
+ An instance of InputsComputeResidualAndError.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeResidualAndError:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeResidualAndError
+ outputs:
+ An instance of OutputsComputeResidualAndError.
"""
return super().outputs
@@ -312,14 +282,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_or_fields_container2)
@property
- def field_or_fields_container1(self):
- """Allows to connect field_or_fields_container1 input to the operator.
+ def field_or_fields_container1(self) -> Input:
+ r"""Allows to connect field_or_fields_container1 input to the operator.
- Field or fields container - compulsory
+ field or fields container - compulsory
- Parameters
- ----------
- my_field_or_fields_container1 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -332,27 +303,19 @@ def field_or_fields_container1(self):
return self._field_or_fields_container1
@property
- def normalization_type(self):
- """Allows to connect normalization_type input to the operator.
-
- Type of normalization applied to the
- residuals and norm calculation
- (optional, defaut: absolute):
- 0 for absolute,
- 1 for relative to the first entry at
- a given time step,
- 2 for normalized by the max at a
- given time step of the first entry or
- residuals depending on the reference
- field option,
- 3 for normalized by the max over all
- time steps of the first entry or
- residuals depending on the reference
- field option
+ def normalization_type(self) -> Input:
+ r"""Allows to connect normalization_type input to the operator.
- Parameters
- ----------
- my_normalization_type : int
+ type of normalization applied to the residuals and norm calculation (optional, default: absolute):
+ 0 for absolute,
+ 1 for relative to the first entry at a given time step,
+ 2 for normalized by the max at a given time step of the first entry or residuals depending on the reference field option,
+ 3 for normalized by the max over all time steps of the first entry or residuals depending on the reference field option
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -365,18 +328,17 @@ def normalization_type(self):
return self._normalization_type
@property
- def norm_calculation_type(self):
- """Allows to connect norm_calculation_type input to the operator.
+ def norm_calculation_type(self) -> Input:
+ r"""Allows to connect norm_calculation_type input to the operator.
- Type for norm calculation (optional, default:
- l2) - it is normalized depending on
- pin2 selection
- 1 for l1, ie sum(abs(xi)),
- 2 for l2, ie sqrt(sum((xi^2))
+ type for norm calculation (optional, default: L2) - It is normalized depending on Pin2 selection
+ 1 for L1, i.e. sum(abs(xi)),
+ 2 for L2, i.e. sqrt(sum(xi^2))
- Parameters
- ----------
- my_norm_calculation_type : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -389,16 +351,15 @@ def norm_calculation_type(self):
return self._norm_calculation_type
@property
- def field_reference(self):
- """Allows to connect field_reference input to the operator.
+ def field_reference(self) -> Input:
+ r"""Allows to connect field_reference input to the operator.
- Field reference for the normalization step,
- default: 0 for entry 1, 1 for
- residuals - optional
+ Field reference for the normalization step, default: 0 for entry 1, 1 for residuals - optional
- Parameters
- ----------
- my_field_reference : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -411,15 +372,15 @@ def field_reference(self):
return self._field_reference
@property
- def field_or_fields_container2(self):
- """Allows to connect field_or_fields_container2 input to the operator.
+ def field_or_fields_container2(self) -> Input:
+ r"""Allows to connect field_or_fields_container2 input to the operator.
- Field or fields container of same
- dimensionality as entry 1 - optional
+ field or fields container of same dimensionality as entry 1 - optional
- Parameters
- ----------
- my_field_or_fields_container2 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
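
A sketch of math.compute_residual_and_error with both entries connected, absolute residuals and an L2 error norm; the two fields are placeholders for data of the same dimensionality:

from ansys.dpf import core as dpf

ref_field = dpf.Field()    # placeholder reference entry (pin 0)
other_field = dpf.Field()  # placeholder comparison entry (pin 4)

op = dpf.operators.math.compute_residual_and_error()
op.inputs.field_or_fields_container1.connect(ref_field)
op.inputs.field_or_fields_container2.connect(other_field)
op.inputs.normalization_type.connect(0)     # 0: absolute residuals
op.inputs.norm_calculation_type.connect(2)  # 2: L2 norm, sqrt(sum(xi^2))
residuals = op.outputs.residuals()
error = op.outputs.error()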
diff --git a/src/ansys/dpf/core/operators/math/conjugate.py b/src/ansys/dpf/core/operators/math/conjugate.py
index d8b80ab4606..d454e7ac1ad 100644
--- a/src/ansys/dpf/core/operators/math/conjugate.py
+++ b/src/ansys/dpf/core/operators/math/conjugate.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class conjugate(Operator):
- """Computes element-wise conjugate of field containers containing complex
+ r"""Computes element-wise conjugate of field containers containing complex
fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise conjugate of field containers containing complex
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise conjugate of field containers containing complex
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="conjugate", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsConjugate:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsConjugate
+ inputs:
+ An instance of InputsConjugate.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsConjugate:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsConjugate
+ outputs:
+ An instance of OutputsConjugate.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.conjugate()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
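
math.conjugate only needs the complex fields container; the input below is a placeholder:

from ansys.dpf import core as dpf

my_fields_container = dpf.FieldsContainer()  # placeholder complex fields

op = dpf.operators.math.conjugate(fields_container=my_fields_container)
result_fields_container = op.outputs.fields_container()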
diff --git a/src/ansys/dpf/core/operators/math/correlation.py b/src/ansys/dpf/core/operators/math/correlation.py
index 289d2967da8..e6a798f0876 100644
--- a/src/ansys/dpf/core/operators/math/correlation.py
+++ b/src/ansys/dpf/core/operators/math/correlation.py
@@ -4,41 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class correlation(Operator):
- """Takes two fields and a weighting and computes their correlation:
- aMb/(||aMa||.||bMb||). If several b fields are provided (via a
- fields container), correlation is computed for each of them.
+ r"""Takes two fields and a weighting and computes their correlation:
+ aMb/(||aMa||.||bMb||). If several b fields are provided (via a fields
+ container), correlation is computed for each of them.
+
Parameters
----------
- fieldA : Field or float
- Field a. the reference field.
- fieldB : Field or FieldsContainer
- Field b. if a fields container is provided,
- correlation is computed for each
- field.
- ponderation : Field or FieldsContainer
- Field m, optional weighting for correlation
- computation.
- absoluteValue : bool
- If true, correlation factor is
- ||amb||/(||ama||.||bmb||)
+ fieldA: Field or float
+ Field a. The reference field.
+ fieldB: Field or FieldsContainer
+ Field b. If a fields container is provided, correlation is computed for each field.
+ weights: Field or FieldsContainer
+ Field M, optional weighting for correlation computation.
+ absoluteValue: bool
+ If true, correlation factor is ||aMb||/(||aMa||.||bMb||)
Returns
-------
- field : Field
+ field: Field
Correlation factor for each input field b.
- index : int
- If several b are provided, this output
- contains the index of the highest
- correlation factor.
+ index: int
+ If several b are provided, this output contains the index of the highest correlation factor.
Examples
--------
@@ -52,8 +51,8 @@ class correlation(Operator):
>>> op.inputs.fieldA.connect(my_fieldA)
>>> my_fieldB = dpf.Field()
>>> op.inputs.fieldB.connect(my_fieldB)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_absoluteValue = bool()
>>> op.inputs.absoluteValue.connect(my_absoluteValue)
@@ -61,7 +60,7 @@ class correlation(Operator):
>>> op = dpf.operators.math.correlation(
... fieldA=my_fieldA,
... fieldB=my_fieldB,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... absoluteValue=my_absoluteValue,
... )
@@ -74,10 +73,11 @@ def __init__(
self,
fieldA=None,
fieldB=None,
- ponderation=None,
+ weights=None,
absoluteValue=None,
config=None,
server=None,
+ ponderation=None,
):
super().__init__(name="correlation", config=config, server=server)
self._inputs = InputsCorrelation(self)
@@ -86,17 +86,19 @@ def __init__(
self.inputs.fieldA.connect(fieldA)
if fieldB is not None:
self.inputs.fieldB.connect(fieldB)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
if absoluteValue is not None:
self.inputs.absoluteValue.connect(absoluteValue)
@staticmethod
- def _spec():
- description = """Takes two fields and a weighting and computes their correlation:
- aMb/(||aMa||.||bMb||). If several b fields are provided
- (via a fields container), correlation is computed for each
- of them."""
+ def _spec() -> Specification:
+ description = r"""Takes two fields and a weighting and computes their correlation:
+aMb/(||aMa||.||bMb||). If several b fields are provided (via a fields
+container), correlation is computed for each of them.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -104,29 +106,26 @@ def _spec():
name="fieldA",
type_names=["field", "double", "vector"],
optional=False,
- document="""Field a. the reference field.""",
+ document=r"""Field a. The reference field.""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field b. if a fields container is provided,
- correlation is computed for each
- field.""",
+ document=r"""Field b. If a fields container is provided, correlation is computed for each field.""",
),
2: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["field", "fields_container"],
optional=False,
- document="""Field m, optional weighting for correlation
- computation.""",
+ document=r"""Field M, optional weighting for correlation computation.""",
+ aliases=["ponderation"],
),
3: PinSpecification(
name="absoluteValue",
type_names=["bool"],
optional=False,
- document="""If true, correlation factor is
- ||amb||/(||ama||.||bmb||)""",
+ document=r"""If true, correlation factor is ||aMb||/(||aMa||.||bMb||)""",
),
},
map_output_pin_spec={
@@ -134,22 +133,20 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Correlation factor for each input field b.""",
+ document=r"""Correlation factor for each input field b.""",
),
1: PinSpecification(
name="index",
type_names=["int32"],
optional=False,
- document="""If several b are provided, this output
- contains the index of the highest
- correlation factor.""",
+ document=r"""If several b are provided, this output contains the index of the highest correlation factor.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -158,29 +155,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="correlation", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCorrelation:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCorrelation
+ inputs:
+ An instance of InputsCorrelation.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCorrelation:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCorrelation
+ outputs:
+ An instance of OutputsCorrelation.
"""
return super().outputs
@@ -197,8 +201,8 @@ class InputsCorrelation(_Inputs):
>>> op.inputs.fieldA.connect(my_fieldA)
>>> my_fieldB = dpf.Field()
>>> op.inputs.fieldB.connect(my_fieldB)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_absoluteValue = bool()
>>> op.inputs.absoluteValue.connect(my_absoluteValue)
"""
@@ -209,20 +213,21 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldA)
self._fieldB = Input(correlation._spec().input_pin(1), 1, op, -1)
self._inputs.append(self._fieldB)
- self._ponderation = Input(correlation._spec().input_pin(2), 2, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(correlation._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._weights)
self._absoluteValue = Input(correlation._spec().input_pin(3), 3, op, -1)
self._inputs.append(self._absoluteValue)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field a. the reference field.
+ Field a. The reference field.
- Parameters
- ----------
- my_fieldA : Field or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -235,16 +240,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field b. if a fields container is provided,
- correlation is computed for each
- field.
+ Field b. If a fields container is provided, correlation is computed for each field.
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -257,36 +261,36 @@ def fieldB(self):
return self._fieldB
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Field m, optional weighting for correlation
- computation.
+ Field M, optional weighting for correlation computation.
- Parameters
- ----------
- my_ponderation : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.correlation()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
@property
- def absoluteValue(self):
- """Allows to connect absoluteValue input to the operator.
+ def absoluteValue(self) -> Input:
+ r"""Allows to connect absoluteValue input to the operator.
- If true, correlation factor is
- ||amb||/(||ama||.||bmb||)
+ If true, correlation factor is ||aMb||/(||aMa||.||bMb||)
- Parameters
- ----------
- my_absoluteValue : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -298,6 +302,18 @@ def absoluteValue(self):
"""
return self._absoluteValue
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator correlation: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
+
class OutputsCorrelation(_Outputs):
"""Intermediate class used to get outputs from
@@ -320,35 +336,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._index)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ Correlation factor for each input field b.
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.correlation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def index(self):
- """Allows to get index output of the operator
+ def index(self) -> Output:
+ r"""Allows to get index output of the operator
+
+ If several b are provided, this output contains the index of the highest correlation factor.
Returns
- ----------
- my_index : int
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.correlation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_index = op.outputs.index()
- """ # noqa: E501
+ """
return self._index
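The rename above is kept backward compatible in three places: the constructor still accepts ponderation, pin 2 declares it as an alias, and InputsCorrelation.__getattr__ redirects the old attribute name with a DeprecationWarning. A minimal sketch, assuming field_a, field_b, and weight_field are pre-existing Field objects:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.correlation(fieldA=field_a, fieldB=field_b, weights=weight_field)
>>> # The deprecated spelling still resolves to the same pin, with a DeprecationWarning:
>>> op.inputs.ponderation.connect(weight_field)
>>> factor = op.outputs.field()
>>> best = op.outputs.index()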
diff --git a/src/ansys/dpf/core/operators/math/cos.py b/src/ansys/dpf/core/operators/math/cos.py
index 85aabdb0513..95cabb495ee 100644
--- a/src/ansys/dpf/core/operators/math/cos.py
+++ b/src/ansys/dpf/core/operators/math/cos.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cos(Operator):
- """Computes element-wise cos(field[i]).
+ r"""Computes element-wise cos(field[i]).
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise cos(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise cos(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cos", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCos:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCos
+ inputs:
+ An instance of InputsCos.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCos:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCos
+ outputs:
+ An instance of OutputsCos.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cos()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
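As the pin documentation notes, the single input accepts either a Field or a FieldsContainer holding exactly one field. A short sketch, assuming my_field is an existing Field:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cos()
>>> op.inputs.field.connect(my_field)
>>> # or, equivalently, a one-field FieldsContainer on the same pin
>>> # op.inputs.field.connect(my_fields_container)
>>> result = op.outputs.field()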
diff --git a/src/ansys/dpf/core/operators/math/cos_fc.py b/src/ansys/dpf/core/operators/math/cos_fc.py
index 516750ed418..e149dd33f4b 100644
--- a/src/ansys/dpf/core/operators/math/cos_fc.py
+++ b/src/ansys/dpf/core/operators/math/cos_fc.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cos_fc(Operator):
- """Computes element-wise cos(field[i]).
+ r"""Computes element-wise cos(field[i]).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise cos(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise cos(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cos_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCosFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCosFc
+ inputs:
+ An instance of InputsCosFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCosFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCosFc
+ outputs:
+ An instance of OutputsCosFc.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cos_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/cplx_derive.py b/src/ansys/dpf/core/operators/math/cplx_derive.py
index 3490c7f6345..745d2b299ba 100644
--- a/src/ansys/dpf/core/operators/math/cplx_derive.py
+++ b/src/ansys/dpf/core/operators/math/cplx_derive.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cplx_derive(Operator):
- """Derives field containers containing complex fields.
+ r"""Derives field containers containing complex fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Derives field containers containing complex fields."""
+ def _spec() -> Specification:
+ description = r"""Derives field containers containing complex fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cplx_derive", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCplxDerive:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCplxDerive
+ inputs:
+ An instance of InputsCplxDerive.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCplxDerive:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCplxDerive
+ outputs:
+ An instance of OutputsCplxDerive.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cplx_derive()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/cplx_divide.py b/src/ansys/dpf/core/operators/math/cplx_divide.py
index 5941b232afc..7ed18319481 100644
--- a/src/ansys/dpf/core/operators/math/cplx_divide.py
+++ b/src/ansys/dpf/core/operators/math/cplx_divide.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cplx_divide(Operator):
- """Computes division between two field containers containing complex
+ r"""Computes division between two field containers containing complex
fields.
+
Parameters
----------
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -59,9 +64,10 @@ def __init__(
self.inputs.fields_containerB.connect(fields_containerB)
@staticmethod
- def _spec():
- description = """Computes division between two field containers containing complex
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes division between two field containers containing complex
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,14 +89,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cplx_divide", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCplxDivide:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCplxDivide
+ inputs:
+ An instance of InputsCplxDivide.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCplxDivide:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCplxDivide
+ outputs:
+ An instance of OutputsCplxDivide.
"""
return super().outputs
@@ -148,12 +161,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerB)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,12 +180,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,18 +217,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cplx_divide()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/cplx_dot.py b/src/ansys/dpf/core/operators/math/cplx_dot.py
index 544b7dab523..eb70c6c3b66 100644
--- a/src/ansys/dpf/core/operators/math/cplx_dot.py
+++ b/src/ansys/dpf/core/operators/math/cplx_dot.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cplx_dot(Operator):
- """Computes product between two field containers containing complex
- fields.
+ r"""Computes product between two field containers containing complex fields.
+
Parameters
----------
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -59,9 +63,9 @@ def __init__(
self.inputs.fields_containerB.connect(fields_containerB)
@staticmethod
- def _spec():
- description = """Computes product between two field containers containing complex
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes product between two field containers containing complex fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +73,13 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,14 +87,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cplx_dot", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCplxDot:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCplxDot
+ inputs:
+ An instance of InputsCplxDot.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCplxDot:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCplxDot
+ outputs:
+ An instance of OutputsCplxDot.
"""
return super().outputs
@@ -148,12 +159,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerB)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,12 +178,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,18 +215,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cplx_dot()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/cplx_multiply.py b/src/ansys/dpf/core/operators/math/cplx_multiply.py
index 4614e3428f4..7437e839428 100644
--- a/src/ansys/dpf/core/operators/math/cplx_multiply.py
+++ b/src/ansys/dpf/core/operators/math/cplx_multiply.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cplx_multiply(Operator):
- """Computes multiplication between two field containers containing
- complex fields.
+ r"""Computes multiplication between two field containers containing complex
+ fields.
+
Parameters
----------
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -59,9 +64,10 @@ def __init__(
self.inputs.fields_containerB.connect(fields_containerB)
@staticmethod
- def _spec():
- description = """Computes multiplication between two field containers containing
- complex fields."""
+ def _spec() -> Specification:
+ description = r"""Computes multiplication between two field containers containing complex
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,14 +89,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cplx_multiply", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCplxMultiply:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCplxMultiply
+ inputs:
+ An instance of InputsCplxMultiply.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCplxMultiply:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCplxMultiply
+ outputs:
+ An instance of OutputsCplxMultiply.
"""
return super().outputs
@@ -148,12 +161,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerB)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,12 +180,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,18 +217,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cplx_multiply()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/cross_product.py b/src/ansys/dpf/core/operators/math/cross_product.py
index c487c343967..c241adcf5db 100644
--- a/src/ansys/dpf/core/operators/math/cross_product.py
+++ b/src/ansys/dpf/core/operators/math/cross_product.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cross_product(Operator):
- """Computes the cross product of two vector fields. Fields can have the
+ r"""Computes the cross product of two vector fields. Fields can have the
same location or Elemental Nodal and Nodal locations.
+
Parameters
----------
- fieldA : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,9 +64,10 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes the cross product of two vector fields. Fields can have the
- same location or Elemental Nodal and Nodal locations."""
+ def _spec() -> Specification:
+ description = r"""Computes the cross product of two vector fields. Fields can have the
+same location or Elemental Nodal and Nodal locations.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,8 +80,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
@@ -88,8 +91,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -97,14 +99,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -113,29 +115,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cross_product", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCrossProduct:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCrossProduct
+ inputs:
+ An instance of InputsCrossProduct.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCrossProduct:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCrossProduct
+ outputs:
+ An instance of OutputsCrossProduct.
"""
return super().outputs
@@ -162,15 +171,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -183,15 +192,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,18 +231,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cross_product()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
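A brief sketch of the two-pin form, assuming vec_a and vec_b are existing 3-component vector Fields defined on compatible locations:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cross_product(fieldA=vec_a, fieldB=vec_b)
>>> cross = op.outputs.field()   # cross product evaluated entity by entity, returned as a new Field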
diff --git a/src/ansys/dpf/core/operators/math/cross_product_fc.py b/src/ansys/dpf/core/operators/math/cross_product_fc.py
index 8c8eee0961e..ea4669f12ee 100644
--- a/src/ansys/dpf/core/operators/math/cross_product_fc.py
+++ b/src/ansys/dpf/core/operators/math/cross_product_fc.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cross_product_fc(Operator):
- """Computes the cross product of two vector fields. Fields can have the
+ r"""Computes the cross product of two vector fields. Fields can have the
same location or Elemental Nodal and Nodal locations.
+
Parameters
----------
- field_or_fields_container_A : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- field_or_fields_container_B : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field_or_fields_container_A: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ field_or_fields_container_B: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -67,9 +70,10 @@ def __init__(
self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B)
@staticmethod
- def _spec():
- description = """Computes the cross product of two vector fields. Fields can have the
- same location or Elemental Nodal and Nodal locations."""
+ def _spec() -> Specification:
+ description = r"""Computes the cross product of two vector fields. Fields can have the
+same location or Elemental Nodal and Nodal locations.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -82,8 +86,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="field_or_fields_container_B",
@@ -94,8 +97,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -103,14 +105,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -119,29 +121,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cross_product_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCrossProductFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCrossProductFc
+ inputs:
+ An instance of InputsCrossProductFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCrossProductFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCrossProductFc
+ outputs:
+ An instance of OutputsCrossProductFc.
"""
return super().outputs
@@ -172,15 +181,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_or_fields_container_B)
@property
- def field_or_fields_container_A(self):
- """Allows to connect field_or_fields_container_A input to the operator.
+ def field_or_fields_container_A(self) -> Input:
+ r"""Allows to connect field_or_fields_container_A input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_A : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -193,15 +202,15 @@ def field_or_fields_container_A(self):
return self._field_or_fields_container_A
@property
- def field_or_fields_container_B(self):
- """Allows to connect field_or_fields_container_B input to the operator.
+ def field_or_fields_container_B(self) -> Input:
+ r"""Allows to connect field_or_fields_container_B input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_B : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -232,18 +241,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cross_product_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/dot.py b/src/ansys/dpf/core/operators/math/dot.py
index 379fd6066d1..dc89e1f0aa8 100644
--- a/src/ansys/dpf/core/operators/math/dot.py
+++ b/src/ansys/dpf/core/operators/math/dot.py
@@ -4,33 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class dot(Operator):
- """DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element -
- wise dot product between two vector fields. If one field's scoping
- has an 'overall' location, then this field's values are applied on
- the other field entirely.When using a constant or 'work_by_index',
- you can use 'inplace' to reuse one of the fields, but only in the
- case where both fields are scalar.
+ r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element -
+ wise dot product between two vector fields. If one field’s scoping has
+ an ‘overall’ location, then this field’s values are applied on the other
+ field entirely.When using a constant or ‘work_by_index’, you can use
+ ‘inplace’ to reuse one of the fields, but only in the case where both
+ fields are scalar.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -65,14 +68,14 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element -
- wise dot product between two vector fields. If one field's
- scoping has an 'overall' location, then this field's
- values are applied on the other field entirely.When using
- a constant or 'work_by_index', you can use 'inplace' to
- reuse one of the fields, but only in the case where both
- fields are scalar."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element -
+wise dot product between two vector fields. If one field’s scoping has
+an ‘overall’ location, then this field’s values are applied on the other
+field entirely.When using a constant or ‘work_by_index’, you can use
+‘inplace’ to reuse one of the fields, but only in the case where both
+fields are scalar.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,15 +83,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -96,14 +97,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +113,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="dot", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDot:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDot
+ inputs:
+ An instance of InputsDot.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDot:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDot
+ outputs:
+ An instance of OutputsDot.
"""
return super().outputs
@@ -161,15 +169,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -182,15 +190,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,18 +229,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.dot()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
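Since the operator is flagged as deprecated in favor of the generalized inner product, a hedged sketch of both forms follows (assuming my_fieldA and my_fieldB are existing vector Fields, and assuming the replacement is exposed as generalized_inner_product in the same math namespace with fieldA/fieldB pins):
>>> from ansys.dpf import core as dpf
>>> # Deprecated but still functional:
>>> dot_op = dpf.operators.math.dot(fieldA=my_fieldA, fieldB=my_fieldB)
>>> dotted = dot_op.outputs.field()
>>> # Suggested replacement (name assumed from the deprecation notice):
>>> gip_op = dpf.operators.math.generalized_inner_product(fieldA=my_fieldA, fieldB=my_fieldB)
>>> dotted_gip = gip_op.outputs.field()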
diff --git a/src/ansys/dpf/core/operators/math/dot_tensor.py b/src/ansys/dpf/core/operators/math/dot_tensor.py
index 3612f123a9e..f9e054d4635 100644
--- a/src/ansys/dpf/core/operators/math/dot_tensor.py
+++ b/src/ansys/dpf/core/operators/math/dot_tensor.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class dot_tensor(Operator):
- """DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element-
- wise dot product between two tensor fields.
+ r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element-wise
+ dot product between two tensor fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,9 +64,10 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element-
- wise dot product between two tensor fields."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE GENERALIZED INNER PRODUCT. Computes element-wise
+dot product between two tensor fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,15 +75,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -87,14 +89,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="dot_tensor", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDotTensor:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDotTensor
+ inputs:
+ An instance of InputsDotTensor.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDotTensor:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDotTensor
+ outputs:
+ An instance of OutputsDotTensor.
"""
return super().outputs
@@ -152,15 +161,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +182,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +221,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.dot_tensor()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
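The deprecated `dot_tensor` operator follows the same pattern; a hedged sketch assuming `my_fieldA` and `my_fieldB` are tensor `Field` objects (the docstring above recommends the generalized inner product instead):
>>> from ansys.dpf import core as dpf
>>> # my_fieldA and my_fieldB are assumed tensor Field objects
>>> op = dpf.operators.math.dot_tensor(fieldA=my_fieldA, fieldB=my_fieldB)
>>> result_field = op.outputs.field()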
diff --git a/src/ansys/dpf/core/operators/math/entity_extractor.py b/src/ansys/dpf/core/operators/math/entity_extractor.py
index 2ef225601d8..4defd2a9443 100644
--- a/src/ansys/dpf/core/operators/math/entity_extractor.py
+++ b/src/ansys/dpf/core/operators/math/entity_extractor.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class entity_extractor(Operator):
- """Extracts an entity from a field, based on its ID.
+ r"""Extracts an entity from a field, based on its ID.
+
Parameters
----------
- fieldA : Field
- scalar_int : int
+ fieldA: Field
+ scalar_int: int
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, fieldA=None, scalar_int=None, config=None, server=None):
self.inputs.scalar_int.connect(scalar_int)
@staticmethod
- def _spec():
- description = """Extracts an entity from a field, based on its ID."""
+ def _spec() -> Specification:
+ description = r"""Extracts an entity from a field, based on its ID.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scalar_int",
type_names=["int32"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="entity_extractor", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEntityExtractor:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEntityExtractor
+ inputs:
+ An instance of InputsEntityExtractor.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEntityExtractor:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEntityExtractor
+ outputs:
+ An instance of OutputsEntityExtractor.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scalar_int)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def fieldA(self):
return self._fieldA
@property
- def scalar_int(self):
- """Allows to connect scalar_int input to the operator.
+ def scalar_int(self) -> Input:
+ r"""Allows to connect scalar_int input to the operator.
- Parameters
- ----------
- my_scalar_int : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.entity_extractor()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
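A minimal sketch for `entity_extractor`, assuming `my_field` is a pre-existing `Field` and `42` stands in for an entity ID:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.entity_extractor()
>>> op.inputs.fieldA.connect(my_field)  # my_field: assumed Field
>>> op.inputs.scalar_int.connect(42)  # 42: illustrative entity ID
>>> result_field = op.outputs.field()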
diff --git a/src/ansys/dpf/core/operators/math/expansion_psd.py b/src/ansys/dpf/core/operators/math/expansion_psd.py
index f088197c20d..354c307913e 100644
--- a/src/ansys/dpf/core/operators/math/expansion_psd.py
+++ b/src/ansys/dpf/core/operators/math/expansion_psd.py
@@ -4,50 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class expansion_psd(Operator):
- """Computes the PSD response for one-sigma solution.
+ r"""Computes the PSD response for one-sigma solution.
+
Parameters
----------
- mode_shapes : FieldsContainer
- Fields container containing the mode shapes
- from modal analysis file: mode shapes
- for dynamic and pseudo-static
- displacements
- static_shapes : FieldsContainer, optional
- Fields container containing the static shapes
- (base excitations) from spectral
- analysis file
- rel_rel_covar_matrix : FieldsContainer
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-mode shapes
- stat_stat_covar_matrix : FieldsContainer, optional
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- static-static shapes
- rel_stat_covar_matrix : FieldsContainer, optional
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-static shapes
+ mode_shapes: FieldsContainer
+ Fields container containing the mode shapes from modal analysis file: mode shapes for dynamic and pseudo-static displacements
+ static_shapes: FieldsContainer, optional
+ Fields container containing the static shapes (base excitations) from spectral analysis file
+ rel_rel_covar_matrix: FieldsContainer
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-mode shapes
+ stat_stat_covar_matrix: FieldsContainer, optional
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration static-static shapes
+ rel_stat_covar_matrix: FieldsContainer, optional
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-static shapes
Returns
-------
- psd : FieldsContainer
- Psd solution per label
+ psd: FieldsContainer
+ PSD solution per label
Examples
--------
@@ -106,8 +94,9 @@ def __init__(
self.inputs.rel_stat_covar_matrix.connect(rel_stat_covar_matrix)
@staticmethod
- def _spec():
- description = """Computes the PSD response for one-sigma solution."""
+ def _spec() -> Specification:
+ description = r"""Computes the PSD response for one-sigma solution.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -115,48 +104,31 @@ def _spec():
name="mode_shapes",
type_names=["fields_container"],
optional=False,
- document="""Fields container containing the mode shapes
- from modal analysis file: mode shapes
- for dynamic and pseudo-static
- displacements""",
+ document=r"""Fields container containing the mode shapes from modal analysis file: mode shapes for dynamic and pseudo-static displacements""",
),
1: PinSpecification(
name="static_shapes",
type_names=["fields_container"],
optional=True,
- document="""Fields container containing the static shapes
- (base excitations) from spectral
- analysis file""",
+ document=r"""Fields container containing the static shapes (base excitations) from spectral analysis file""",
),
2: PinSpecification(
name="rel_rel_covar_matrix",
type_names=["fields_container"],
optional=False,
- document="""Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-mode shapes""",
+ document=r"""Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-mode shapes """,
),
3: PinSpecification(
name="stat_stat_covar_matrix",
type_names=["fields_container"],
optional=True,
- document="""Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- static-static shapes""",
+ document=r"""Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration static-static shapes """,
),
4: PinSpecification(
name="rel_stat_covar_matrix",
type_names=["fields_container"],
optional=True,
- document="""Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-static shapes""",
+ document=r"""Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-static shapes """,
),
},
map_output_pin_spec={
@@ -164,14 +136,14 @@ def _spec():
name="psd",
type_names=["fields_container"],
optional=False,
- document="""Psd solution per label""",
+ document=r"""PSD solution per label""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -180,29 +152,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="expansion::psd", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExpansionPsd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExpansionPsd
+ inputs:
+ An instance of InputsExpansionPsd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExpansionPsd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExpansionPsd
+ outputs:
+ An instance of OutputsExpansionPsd.
"""
return super().outputs
@@ -247,17 +226,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._rel_stat_covar_matrix)
@property
- def mode_shapes(self):
- """Allows to connect mode_shapes input to the operator.
+ def mode_shapes(self) -> Input:
+ r"""Allows to connect mode_shapes input to the operator.
- Fields container containing the mode shapes
- from modal analysis file: mode shapes
- for dynamic and pseudo-static
- displacements
+ Fields container containing the mode shapes from modal analysis file: mode shapes for dynamic and pseudo-static displacements
- Parameters
- ----------
- my_mode_shapes : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,16 +247,15 @@ def mode_shapes(self):
return self._mode_shapes
@property
- def static_shapes(self):
- """Allows to connect static_shapes input to the operator.
+ def static_shapes(self) -> Input:
+ r"""Allows to connect static_shapes input to the operator.
- Fields container containing the static shapes
- (base excitations) from spectral
- analysis file
+ Fields container containing the static shapes (base excitations) from spectral analysis file
- Parameters
- ----------
- my_static_shapes : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -292,18 +268,15 @@ def static_shapes(self):
return self._static_shapes
@property
- def rel_rel_covar_matrix(self):
- """Allows to connect rel_rel_covar_matrix input to the operator.
+ def rel_rel_covar_matrix(self) -> Input:
+ r"""Allows to connect rel_rel_covar_matrix input to the operator.
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-mode shapes
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-mode shapes
- Parameters
- ----------
- my_rel_rel_covar_matrix : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -316,18 +289,15 @@ def rel_rel_covar_matrix(self):
return self._rel_rel_covar_matrix
@property
- def stat_stat_covar_matrix(self):
- """Allows to connect stat_stat_covar_matrix input to the operator.
+ def stat_stat_covar_matrix(self) -> Input:
+ r"""Allows to connect stat_stat_covar_matrix input to the operator.
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- static-static shapes
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration static-static shapes
- Parameters
- ----------
- my_stat_stat_covar_matrix : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,18 +310,15 @@ def stat_stat_covar_matrix(self):
return self._stat_stat_covar_matrix
@property
- def rel_stat_covar_matrix(self):
- """Allows to connect rel_stat_covar_matrix input to the operator.
+ def rel_stat_covar_matrix(self) -> Input:
+ r"""Allows to connect rel_stat_covar_matrix input to the operator.
- Fields container containing covariance
- matrices from a psd file: covariance
- matrix terms for
- displacement/velocity/acceleration
- mode-static shapes
+ Fields container containing covariance matrices from a psd file: covariance matrix terms for displacement/velocity/acceleration mode-static shapes
- Parameters
- ----------
- my_rel_stat_covar_matrix : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -382,18 +349,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._psd)
@property
- def psd(self):
- """Allows to get psd output of the operator
+ def psd(self) -> Output:
+ r"""Allows to get psd output of the operator
+
+ PSD solution per label
Returns
- ----------
- my_psd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.expansion_psd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_psd = op.outputs.psd()
- """ # noqa: E501
+ """
return self._psd
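A hedged sketch for `expansion_psd` connecting only the required pins, assuming `my_mode_shapes` and `my_rel_rel_covar_matrix` are `FieldsContainer` objects read from the modal and PSD files (the optional static and cross covariance pins are omitted here):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.expansion_psd()
>>> # both inputs below are assumed, pre-existing FieldsContainer objects
>>> op.inputs.mode_shapes.connect(my_mode_shapes)
>>> op.inputs.rel_rel_covar_matrix.connect(my_rel_rel_covar_matrix)
>>> result_psd = op.outputs.psd()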
diff --git a/src/ansys/dpf/core/operators/math/exponential.py b/src/ansys/dpf/core/operators/math/exponential.py
index b8f6965845d..1035ad9491c 100644
--- a/src/ansys/dpf/core/operators/math/exponential.py
+++ b/src/ansys/dpf/core/operators/math/exponential.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class exponential(Operator):
- """Computes element-wise exp(field[i]).
+ r"""Computes element-wise exp(field[i]).
+
Parameters
----------
- field : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise exp(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise exp(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -66,8 +71,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -75,14 +79,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -91,29 +95,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="exponential", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExponential:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExponential
+ inputs:
+ An instance of InputsExponential.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExponential:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExponential
+ outputs:
+ An instance of OutputsExponential.
"""
return super().outputs
@@ -136,15 +147,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -175,18 +186,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.exponential()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
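A minimal sketch for `exponential`, assuming `my_field` is a pre-existing `Field`:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.exponential(field=my_field)  # my_field: assumed Field
>>> result_field = op.outputs.field()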
diff --git a/src/ansys/dpf/core/operators/math/exponential_fc.py b/src/ansys/dpf/core/operators/math/exponential_fc.py
index d0b7b53730e..9f655c7b69e 100644
--- a/src/ansys/dpf/core/operators/math/exponential_fc.py
+++ b/src/ansys/dpf/core/operators/math/exponential_fc.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class exponential_fc(Operator):
- """Computes element-wise exp(field[i]).
+ r"""Computes element-wise exp(field[i]).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise exp(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise exp(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="exponential_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExponentialFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExponentialFc
+ inputs:
+ An instance of InputsExponentialFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExponentialFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExponentialFc
+ outputs:
+ An instance of OutputsExponentialFc.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.exponential_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
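The fields-container variant is used the same way; a sketch assuming `my_fields_container` is a pre-existing `FieldsContainer`:
>>> from ansys.dpf import core as dpf
>>> # my_fields_container: assumed, pre-existing FieldsContainer
>>> op = dpf.operators.math.exponential_fc(fields_container=my_fields_container)
>>> result_fields_container = op.outputs.fields_container()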
diff --git a/src/ansys/dpf/core/operators/math/fft_approx.py b/src/ansys/dpf/core/operators/math/fft_approx.py
index 95b061f1a7a..9facb948bcf 100644
--- a/src/ansys/dpf/core/operators/math/fft_approx.py
+++ b/src/ansys/dpf/core/operators/math/fft_approx.py
@@ -4,57 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class fft_approx(Operator):
- """Computes the fitting curve using FFT filtering and cubic fitting in
- space (node i: x=time, y=data), with the possibility to compute
- the first and the second derivatives of the curve.
+ r"""Computes the fitting curve using FFT filtering and cubic fitting in
+ space (node i: x=time, y=data), with the possibility to compute the
+ first and the second derivatives of the curve.
+
Parameters
----------
- time_scoping : Scoping, optional
- A time scoping to rescope / split the fields
- container given as input.
- mesh_scoping : Scoping or ScopingsContainer, optional
- A space (mesh entities) scoping (or scopings
- container) to rescope / split the
- fields container given as input.
- entity_to_fit : FieldsContainer
+ time_scoping: Scoping, optional
+ A time scoping to rescope / split the fields container given as input.
+ mesh_scoping: Scoping or ScopingsContainer, optional
+ A space (mesh entities) scoping (or scopings container) to rescope / split the fields container given as input.
+ entity_to_fit: FieldsContainer
Data changing in time to be fitted.
- component_number : int
- Component number as an integer, for example
- '0' for x-displacement, '1' for
- y-displacement, and so on.
- first_derivative : bool
- Calculate the first derivative (bool). the
- default is false.
- second_derivative : bool
- Calculate the second derivative (bool). the
- default is false.
- fit_data : bool
- Calculate the fitted values (bool). the
- default is false
- cutoff_fr : float or int, optional
+ component_number: int
+ Component number as an integer, for example '0' for X-displacement, '1' for Y-displacement, and so on.
+ first_derivative: bool
+ Calculate the first derivative (bool). The default is false.
+ second_derivative: bool
+ Calculate the second derivative (bool). The default is false.
+ fit_data: bool
+ Calculate the fitted values (bool). The default is false
+ cutoff_fr: float or int, optional
Cutoff frequency.
Returns
-------
- fitted_entity_y : FieldsContainer
- The fitted entity is fitted using fft along
- the space scoping (node i: x=time,
- y=data). fitted y is expected to be
- close to the input data.
- first_der_dy : FieldsContainer
- The first derivative (dy) from the fitted y.
- second_der_d2y : FieldsContainer
- The second derivative (d2y) from the fitted
- y.
+ fitted_entity_y: FieldsContainer
+ The fitted entity is fitted using FFT along the space scoping (node i: x=time, y=data). Fitted Y is expected to be close to the input data.
+ first_der_dy: FieldsContainer
+ The first derivative (dY) from the fitted Y.
+ second_der_d2y: FieldsContainer
+ The second derivative (d2Y) from the fitted Y.
Examples
--------
@@ -133,10 +126,11 @@ def __init__(
self.inputs.cutoff_fr.connect(cutoff_fr)
@staticmethod
- def _spec():
- description = """Computes the fitting curve using FFT filtering and cubic fitting in
- space (node i: x=time, y=data), with the possibility to
- compute the first and the second derivatives of the curve."""
+ def _spec() -> Specification:
+ description = r"""Computes the fitting curve using FFT filtering and cubic fitting in
+space (node i: x=time, y=data), with the possibility to compute the
+first and the second derivatives of the curve.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -144,57 +138,49 @@ def _spec():
name="time_scoping",
type_names=["vector", "scoping"],
optional=True,
- document="""A time scoping to rescope / split the fields
- container given as input.""",
+ document=r"""A time scoping to rescope / split the fields container given as input.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["umap", "scoping", "scopings_container"],
optional=True,
- document="""A space (mesh entities) scoping (or scopings
- container) to rescope / split the
- fields container given as input.""",
+ document=r"""A space (mesh entities) scoping (or scopings container) to rescope / split the fields container given as input.""",
),
2: PinSpecification(
name="entity_to_fit",
type_names=["fields_container"],
optional=False,
- document="""Data changing in time to be fitted.""",
+ document=r"""Data changing in time to be fitted.""",
),
3: PinSpecification(
name="component_number",
type_names=["int32"],
optional=False,
- document="""Component number as an integer, for example
- '0' for x-displacement, '1' for
- y-displacement, and so on.""",
+ document=r"""Component number as an integer, for example '0' for X-displacement, '1' for Y-displacement, and so on.""",
),
4: PinSpecification(
name="first_derivative",
type_names=["bool"],
optional=False,
- document="""Calculate the first derivative (bool). the
- default is false.""",
+ document=r"""Calculate the first derivative (bool). The default is false.""",
),
5: PinSpecification(
name="second_derivative",
type_names=["bool"],
optional=False,
- document="""Calculate the second derivative (bool). the
- default is false.""",
+ document=r"""Calculate the second derivative (bool). The default is false.""",
),
6: PinSpecification(
name="fit_data",
type_names=["bool"],
optional=False,
- document="""Calculate the fitted values (bool). the
- default is false""",
+ document=r"""Calculate the fitted values (bool). The default is false""",
),
7: PinSpecification(
name="cutoff_fr",
type_names=["double", "int32"],
optional=True,
- document="""Cutoff frequency.""",
+ document=r"""Cutoff frequency.""",
),
},
map_output_pin_spec={
@@ -202,30 +188,26 @@ def _spec():
name="fitted_entity_y",
type_names=["fields_container"],
optional=False,
- document="""The fitted entity is fitted using fft along
- the space scoping (node i: x=time,
- y=data). fitted y is expected to be
- close to the input data.""",
+ document=r"""The fitted entity is fitted using FFT along the space scoping (node i: x=time, y=data). Fitted Y is expected to be close to the input data.""",
),
1: PinSpecification(
name="first_der_dy",
type_names=["fields_container"],
optional=False,
- document="""The first derivative (dy) from the fitted y.""",
+ document=r"""The first derivative (dY) from the fitted Y.""",
),
2: PinSpecification(
name="second_der_d2y",
type_names=["fields_container"],
optional=False,
- document="""The second derivative (d2y) from the fitted
- y.""",
+ document=r"""The second derivative (d2Y) from the fitted Y.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -234,29 +216,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="fft_approx", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFftApprox:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFftApprox
+ inputs:
+ An instance of InputsFftApprox.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFftApprox:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFftApprox
+ outputs:
+ An instance of OutputsFftApprox.
"""
return super().outputs
@@ -307,15 +296,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._cutoff_fr)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- A time scoping to rescope / split the fields
- container given as input.
+ A time scoping to rescope / split the fields container given as input.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -328,16 +317,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- A space (mesh entities) scoping (or scopings
- container) to rescope / split the
- fields container given as input.
+ A space (mesh entities) scoping (or scopings container) to rescope / split the fields container given as input.
- Parameters
- ----------
- my_mesh_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,14 +338,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def entity_to_fit(self):
- """Allows to connect entity_to_fit input to the operator.
+ def entity_to_fit(self) -> Input:
+ r"""Allows to connect entity_to_fit input to the operator.
Data changing in time to be fitted.
- Parameters
- ----------
- my_entity_to_fit : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -370,16 +359,15 @@ def entity_to_fit(self):
return self._entity_to_fit
@property
- def component_number(self):
- """Allows to connect component_number input to the operator.
+ def component_number(self) -> Input:
+ r"""Allows to connect component_number input to the operator.
- Component number as an integer, for example
- '0' for x-displacement, '1' for
- y-displacement, and so on.
+ Component number as an integer, for example '0' for X-displacement, '1' for Y-displacement, and so on.
- Parameters
- ----------
- my_component_number : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -392,15 +380,15 @@ def component_number(self):
return self._component_number
@property
- def first_derivative(self):
- """Allows to connect first_derivative input to the operator.
+ def first_derivative(self) -> Input:
+ r"""Allows to connect first_derivative input to the operator.
- Calculate the first derivative (bool). the
- default is false.
+ Calculate the first derivative (bool). The default is false.
- Parameters
- ----------
- my_first_derivative : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -413,15 +401,15 @@ def first_derivative(self):
return self._first_derivative
@property
- def second_derivative(self):
- """Allows to connect second_derivative input to the operator.
+ def second_derivative(self) -> Input:
+ r"""Allows to connect second_derivative input to the operator.
- Calculate the second derivative (bool). the
- default is false.
+ Calculate the second derivative (bool). The default is false.
- Parameters
- ----------
- my_second_derivative : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -434,15 +422,15 @@ def second_derivative(self):
return self._second_derivative
@property
- def fit_data(self):
- """Allows to connect fit_data input to the operator.
+ def fit_data(self) -> Input:
+ r"""Allows to connect fit_data input to the operator.
- Calculate the fitted values (bool). the
- default is false
+ Calculate the fitted values (bool). The default is false
- Parameters
- ----------
- my_fit_data : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -455,14 +443,15 @@ def fit_data(self):
return self._fit_data
@property
- def cutoff_fr(self):
- """Allows to connect cutoff_fr input to the operator.
+ def cutoff_fr(self) -> Input:
+ r"""Allows to connect cutoff_fr input to the operator.
Cutoff frequency.
- Parameters
- ----------
- my_cutoff_fr : float or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,52 +488,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._second_der_d2y)
@property
- def fitted_entity_y(self):
- """Allows to get fitted_entity_y output of the operator
+ def fitted_entity_y(self) -> Output:
+ r"""Allows to get fitted_entity_y output of the operator
+
+ The fitted entity is fitted using FFT along the space scoping (node i: x=time, y=data). Fitted Y is expected to be close to the input data.
Returns
- ----------
- my_fitted_entity_y : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_approx()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fitted_entity_y = op.outputs.fitted_entity_y()
- """ # noqa: E501
+ """
return self._fitted_entity_y
@property
- def first_der_dy(self):
- """Allows to get first_der_dy output of the operator
+ def first_der_dy(self) -> Output:
+ r"""Allows to get first_der_dy output of the operator
+
+ The first derivative (dY) from the fitted Y.
Returns
- ----------
- my_first_der_dy : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_approx()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_first_der_dy = op.outputs.first_der_dy()
- """ # noqa: E501
+ """
return self._first_der_dy
@property
- def second_der_d2y(self):
- """Allows to get second_der_d2y output of the operator
+ def second_der_d2y(self) -> Output:
+ r"""Allows to get second_der_d2y output of the operator
+
+ The second derivative (d2Y) from the fitted Y.
Returns
- ----------
- my_second_der_d2y : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_approx()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_second_der_d2y = op.outputs.second_der_d2y()
- """ # noqa: E501
+ """
return self._second_der_d2y
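A hedged sketch for `fft_approx` requesting the fitted curve and its first derivative, assuming `my_entity_to_fit` is a transient `FieldsContainer`; the component number and boolean flags are illustrative only:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_approx()
>>> op.inputs.entity_to_fit.connect(my_entity_to_fit)  # assumed FieldsContainer
>>> op.inputs.component_number.connect(0)  # e.g. '0' for X-displacement
>>> op.inputs.first_derivative.connect(True)
>>> op.inputs.second_derivative.connect(False)
>>> op.inputs.fit_data.connect(True)
>>> result_fitted_entity_y = op.outputs.fitted_entity_y()
>>> result_first_der_dy = op.outputs.first_der_dy()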
diff --git a/src/ansys/dpf/core/operators/math/fft_eval.py b/src/ansys/dpf/core/operators/math/fft_eval.py
index c1c0e829125..0137a3625d5 100644
--- a/src/ansys/dpf/core/operators/math/fft_eval.py
+++ b/src/ansys/dpf/core/operators/math/fft_eval.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class fft_eval(Operator):
- """Evaluate the fast fourier transforms at a given set of fields.
+ r"""Evaluate the fast fourier transforms at a given set of fields.
+
Parameters
----------
- field_t : Field
- Field of values to evaluate
- time_scoping : Scoping, optional
- If specified only the results at these set
- ids are used
+ field_t: Field
+ field of values to evaluate
+ time_scoping: Scoping, optional
+ if specified only the results at these set ids are used
Returns
-------
- field : Field
- offset : Field
+ field: Field
+ offset: Field
Examples
--------
@@ -61,10 +65,9 @@ def __init__(self, field_t=None, time_scoping=None, config=None, server=None):
self.inputs.time_scoping.connect(time_scoping)
@staticmethod
- def _spec():
- description = (
- """Evaluate the fast fourier transforms at a given set of fields."""
- )
+ def _spec() -> Specification:
+ description = r"""Evaluate the fast fourier transforms at a given set of fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -72,14 +75,13 @@ def _spec():
name="field_t",
type_names=["field"],
optional=False,
- document="""Field of values to evaluate""",
+ document=r"""field of values to evaluate""",
),
1: PinSpecification(
name="time_scoping",
type_names=["scoping"],
optional=True,
- document="""If specified only the results at these set
- ids are used""",
+ document=r"""if specified only the results at these set ids are used""",
),
},
map_output_pin_spec={
@@ -87,20 +89,20 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="offset",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -109,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="fft_eval", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFftEval:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFftEval
+ inputs:
+ An instance of InputsFftEval.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFftEval:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFftEval
+ outputs:
+ An instance of OutputsFftEval.
"""
return super().outputs
@@ -158,14 +167,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._time_scoping)
@property
- def field_t(self):
- """Allows to connect field_t input to the operator.
+ def field_t(self) -> Input:
+ r"""Allows to connect field_t input to the operator.
- Field of values to evaluate
+ field of values to evaluate
- Parameters
- ----------
- my_field_t : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -178,15 +188,15 @@ def field_t(self):
return self._field_t
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- If specified only the results at these set
- ids are used
+ if specified only the results at these set ids are used
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -220,35 +230,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._offset)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_eval()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def offset(self):
- """Allows to get offset output of the operator
+ def offset(self) -> Output:
+ r"""Allows to get offset output of the operator
Returns
- ----------
- my_offset : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_eval()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_offset = op.outputs.offset()
- """ # noqa: E501
+ """
return self._offset
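A minimal sketch for `fft_eval`, assuming `my_field_t` is a `Field` of time-sampled values:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_eval(field_t=my_field_t)  # my_field_t: assumed Field
>>> result_field = op.outputs.field()
>>> result_offset = op.outputs.offset()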
diff --git a/src/ansys/dpf/core/operators/math/fft_gradient_eval.py b/src/ansys/dpf/core/operators/math/fft_gradient_eval.py
index 26bf26e43e7..7ba35167575 100644
--- a/src/ansys/dpf/core/operators/math/fft_gradient_eval.py
+++ b/src/ansys/dpf/core/operators/math/fft_gradient_eval.py
@@ -4,29 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class fft_gradient_eval(Operator):
- """Evaluate min max based on the fast fourier transform at a given field,
+ r"""Evaluate min max based on the fast fourier transform at a given field,
using gradient method for adaptative time step.
+
Parameters
----------
- fields_container : FieldsContainer
- time_scoping : Scoping, optional
- If specified only the results at these set
- ids are used
- fs_ratio : int, optional
- Default value = 20
+ fields_container: FieldsContainer
+ time_scoping: Scoping, optional
+ if specified only the results at these set ids are used
+ fs_ratio: int, optional
+ default value = 20
Returns
-------
- coefficients : FieldsContainer
+ coefficients: FieldsContainer
Examples
--------
@@ -73,9 +77,10 @@ def __init__(
self.inputs.fs_ratio.connect(fs_ratio)
@staticmethod
- def _spec():
- description = """Evaluate min max based on the fast fourier transform at a given field,
- using gradient method for adaptative time step."""
+ def _spec() -> Specification:
+ description = r"""Evaluate min max based on the fast fourier transform at a given field,
+using gradient method for adaptative time step.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,20 +88,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="time_scoping",
type_names=["scoping"],
optional=True,
- document="""If specified only the results at these set
- ids are used""",
+ document=r"""if specified only the results at these set ids are used""",
),
2: PinSpecification(
name="fs_ratio",
type_names=["int32"],
optional=True,
- document="""Default value = 20""",
+ document=r"""default value = 20""",
),
},
map_output_pin_spec={
@@ -104,14 +108,14 @@ def _spec():
name="coefficients",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="fft_eval_gr", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFftGradientEval:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFftGradientEval
+ inputs:
+ An instance of InputsFftGradientEval.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFftGradientEval:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFftGradientEval
+ outputs:
+ An instance of OutputsFftGradientEval.
"""
return super().outputs
@@ -175,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fs_ratio)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -193,15 +205,15 @@ def fields_container(self):
return self._fields_container
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- If specified only the results at these set
- ids are used
+ if specified only the results at these set ids are used
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,14 +226,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def fs_ratio(self):
- """Allows to connect fs_ratio input to the operator.
+ def fs_ratio(self) -> Input:
+ r"""Allows to connect fs_ratio input to the operator.
- Default value = 20
+ default value = 20
- Parameters
- ----------
- my_fs_ratio : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,18 +265,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._coefficients)
@property
- def coefficients(self):
- """Allows to get coefficients output of the operator
+ def coefficients(self) -> Output:
+ r"""Allows to get coefficients output of the operator
Returns
- ----------
- my_coefficients : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_gradient_eval()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_coefficients = op.outputs.coefficients()
- """ # noqa: E501
+ """
return self._coefficients
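A hedged sketch for `fft_gradient_eval`, assuming `my_fields_container` holds the harmonic data; the optional `time_scoping` and `fs_ratio` pins keep their defaults:
>>> from ansys.dpf import core as dpf
>>> # my_fields_container: assumed, pre-existing FieldsContainer
>>> op = dpf.operators.math.fft_gradient_eval(fields_container=my_fields_container)
>>> result_coefficients = op.outputs.coefficients()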
diff --git a/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py b/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py
index 6c9526fa873..b9946bcef32 100644
--- a/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py
+++ b/src/ansys/dpf/core/operators/math/fft_multi_harmonic_minmax.py
@@ -4,54 +4,48 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class fft_multi_harmonic_minmax(Operator):
- """Evaluate min max fields on multi harmonic solution. min and max fields
- are calculated based on evaluating a fourier series sum wrt rpms
- and using the gradient method for adaptive time steping
+ r"""Evaluate min max fields on multi harmonic solution. min and max fields
+ are calculated based on evaluating a fourier series sum wrt rpms and
+ using the gradient method for adaptive time steping
+
Parameters
----------
- fields_container : FieldsContainer
- rpm_scoping : Scoping, optional
- Rpm scoping, by default the fourier series
- sum is evaluated using all the rpms
- fs_ratio : int, optional
- Field or fields container with only one field
- is expected
- num_subdivisions : int, optional
- Connect number subdivisions, used for uniform
- discretization
- max_num_subdivisions : int, optional
- Connect max number subdivisions, used to
- avoid huge number of sudivisions
- num_cycles : int, optional
- Number of cycle of the periodic signal
- (default is 2)
- use_harmonic_zero : bool, optional
- Use harmonic zero for first rpm (default is
- false)
- calculate_time_series : bool, optional
- Calculates time series output (output pin 2),
- setting it to false enhance
- performance if only min/max are
- required (default is true)
- substeps_selector : optional
- Substeps to evaluate (frequencies), by
- default the operator is evaluated
- using all the available steps
+ fields_container: FieldsContainer
+ rpm_scoping: Scoping, optional
+ rpm scoping, by default the fourier series sum is evaluated using all the rpms
+ fs_ratio: int, optional
+ field or fields container with only one field is expected
+ num_subdivisions: int, optional
+ connect number subdivisions, used for uniform discretization
+ max_num_subdivisions: int, optional
+ connect max number subdivisions, used to avoid huge number of sudivisions
+ num_cycles: int, optional
+ Number of cycle of the periodic signal (default is 2)
+ use_harmonic_zero: bool, optional
+ use harmonic zero for first rpm (default is false)
+ calculate_time_series: bool, optional
+ calculates time series output (output pin 2), setting it to false enhance performance if only min/max are required (default is true)
+ substeps_selector: optional
+ substeps to evaluate (frequencies), by default the operator is evaluated using all the available steps
Returns
-------
- field_min : FieldsContainer
- field_max : FieldsContainer
- all_fields : FieldsContainer
+ field_min: FieldsContainer
+ field_max: FieldsContainer
+ all_fields: FieldsContainer
Examples
--------
@@ -136,11 +130,11 @@ def __init__(
self.inputs.substeps_selector.connect(substeps_selector)
@staticmethod
- def _spec():
- description = """Evaluate min max fields on multi harmonic solution. min and max fields
- are calculated based on evaluating a fourier series sum
- wrt rpms and using the gradient method for adaptive time
- steping"""
+ def _spec() -> Specification:
+ description = r"""Evaluate min max fields on multi harmonic solution. min and max fields
+are calculated based on evaluating a fourier series sum wrt rpms and
+using the gradient method for adaptive time steping
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -148,66 +142,55 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="rpm_scoping",
type_names=["scoping"],
optional=True,
- document="""Rpm scoping, by default the fourier series
- sum is evaluated using all the rpms""",
+ document=r"""rpm scoping, by default the fourier series sum is evaluated using all the rpms""",
),
2: PinSpecification(
name="fs_ratio",
type_names=["int32"],
optional=True,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
3: PinSpecification(
name="num_subdivisions",
type_names=["int32"],
optional=True,
- document="""Connect number subdivisions, used for uniform
- discretization""",
+ document=r"""connect number subdivisions, used for uniform discretization""",
),
4: PinSpecification(
name="max_num_subdivisions",
type_names=["int32"],
optional=True,
- document="""Connect max number subdivisions, used to
- avoid huge number of sudivisions""",
+ document=r"""connect max number subdivisions, used to avoid huge number of sudivisions""",
),
5: PinSpecification(
name="num_cycles",
type_names=["int32"],
optional=True,
- document="""Number of cycle of the periodic signal
- (default is 2)""",
+ document=r"""Number of cycle of the periodic signal (default is 2)""",
),
6: PinSpecification(
name="use_harmonic_zero",
type_names=["bool"],
optional=True,
- document="""Use harmonic zero for first rpm (default is
- false)""",
+ document=r"""use harmonic zero for first rpm (default is false)""",
),
7: PinSpecification(
name="calculate_time_series",
type_names=["bool"],
optional=True,
- document="""Calculates time series output (output pin 2),
- setting it to false enhance
- performance if only min/max are
- required (default is true)""",
+ document=r"""calculates time series output (output pin 2), setting it to false enhance performance if only min/max are required (default is true)""",
),
8: PinSpecification(
name="substeps_selector",
type_names=["vector"],
optional=True,
- document="""Substeps to evaluate (frequencies), by
- default the operator is evaluated
- using all the available steps""",
+ document=r"""substeps to evaluate (frequencies), by default the operator is evaluated using all the available steps""",
),
},
map_output_pin_spec={
@@ -215,26 +198,26 @@ def _spec():
name="field_min",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="all_fields",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -243,29 +226,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="fft_multi_harmonic_minmax", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFftMultiHarmonicMinmax:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFftMultiHarmonicMinmax
+ inputs:
+ An instance of InputsFftMultiHarmonicMinmax.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFftMultiHarmonicMinmax:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFftMultiHarmonicMinmax
+ outputs:
+ An instance of OutputsFftMultiHarmonicMinmax.
"""
return super().outputs
@@ -338,12 +328,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._substeps_selector)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def rpm_scoping(self):
- """Allows to connect rpm_scoping input to the operator.
+ def rpm_scoping(self) -> Input:
+ r"""Allows to connect rpm_scoping input to the operator.
- Rpm scoping, by default the fourier series
- sum is evaluated using all the rpms
+ rpm scoping, by default the Fourier series sum is evaluated using all the rpms
- Parameters
- ----------
- my_rpm_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -377,15 +368,15 @@ def rpm_scoping(self):
return self._rpm_scoping
@property
- def fs_ratio(self):
- """Allows to connect fs_ratio input to the operator.
+ def fs_ratio(self) -> Input:
+ r"""Allows to connect fs_ratio input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fs_ratio : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -398,15 +389,15 @@ def fs_ratio(self):
return self._fs_ratio
@property
- def num_subdivisions(self):
- """Allows to connect num_subdivisions input to the operator.
+ def num_subdivisions(self) -> Input:
+ r"""Allows to connect num_subdivisions input to the operator.
- Connect number subdivisions, used for uniform
- discretization
+ connect number of subdivisions, used for uniform discretization
- Parameters
- ----------
- my_num_subdivisions : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -419,15 +410,15 @@ def num_subdivisions(self):
return self._num_subdivisions
@property
- def max_num_subdivisions(self):
- """Allows to connect max_num_subdivisions input to the operator.
+ def max_num_subdivisions(self) -> Input:
+ r"""Allows to connect max_num_subdivisions input to the operator.
- Connect max number subdivisions, used to
- avoid huge number of sudivisions
+ connect max number of subdivisions, used to avoid a huge number of subdivisions
- Parameters
- ----------
- my_max_num_subdivisions : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,15 +431,15 @@ def max_num_subdivisions(self):
return self._max_num_subdivisions
@property
- def num_cycles(self):
- """Allows to connect num_cycles input to the operator.
+ def num_cycles(self) -> Input:
+ r"""Allows to connect num_cycles input to the operator.
- Number of cycle of the periodic signal
- (default is 2)
+ Number of cycles of the periodic signal (default is 2)
- Parameters
- ----------
- my_num_cycles : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -461,15 +452,15 @@ def num_cycles(self):
return self._num_cycles
@property
- def use_harmonic_zero(self):
- """Allows to connect use_harmonic_zero input to the operator.
+ def use_harmonic_zero(self) -> Input:
+ r"""Allows to connect use_harmonic_zero input to the operator.
- Use harmonic zero for first rpm (default is
- false)
+ use harmonic zero for first rpm (default is false)
- Parameters
- ----------
- my_use_harmonic_zero : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -482,17 +473,15 @@ def use_harmonic_zero(self):
return self._use_harmonic_zero
@property
- def calculate_time_series(self):
- """Allows to connect calculate_time_series input to the operator.
+ def calculate_time_series(self) -> Input:
+ r"""Allows to connect calculate_time_series input to the operator.
- Calculates time series output (output pin 2),
- setting it to false enhance
- performance if only min/max are
- required (default is true)
+ calculates time series output (output pin 2), setting it to false enhances performance if only min/max are required (default is true)
- Parameters
- ----------
- my_calculate_time_series : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -505,16 +494,15 @@ def calculate_time_series(self):
return self._calculate_time_series
@property
- def substeps_selector(self):
- """Allows to connect substeps_selector input to the operator.
+ def substeps_selector(self) -> Input:
+ r"""Allows to connect substeps_selector input to the operator.
- Substeps to evaluate (frequencies), by
- default the operator is evaluated
- using all the available steps
+ substeps to evaluate (frequencies), by default the operator is evaluated using all the available steps
- Parameters
- ----------
- my_substeps_selector :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,52 +541,55 @@ def __init__(self, op: Operator):
self._outputs.append(self._all_fields)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_multi_harmonic_minmax()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_multi_harmonic_minmax()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
@property
- def all_fields(self):
- """Allows to get all_fields output of the operator
+ def all_fields(self) -> Output:
+ r"""Allows to get all_fields output of the operator
Returns
- ----------
- my_all_fields : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_multi_harmonic_minmax()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_all_fields = op.outputs.all_fields()
- """ # noqa: E501
+ """
return self._all_fields
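A hedged usage sketch for this operator, assuming harmonic_fc is an existing FieldsContainer of harmonic fields per rpm (the variable name is illustrative, not from the source):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.fft_multi_harmonic_minmax()
>>> op.inputs.fields_container.connect(harmonic_fc)
>>> op.inputs.num_cycles.connect(2)                   # optional, default is 2
>>> op.inputs.calculate_time_series.connect(False)    # skip output pin 2 when only min/max are needed
>>> fc_min = op.outputs.field_min()
>>> fc_max = op.outputs.field_max()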
diff --git a/src/ansys/dpf/core/operators/math/generalized_inner_product.py b/src/ansys/dpf/core/operators/math/generalized_inner_product.py
index 07e20e33c99..b8a9f6f8c82 100644
--- a/src/ansys/dpf/core/operators/math/generalized_inner_product.py
+++ b/src/ansys/dpf/core/operators/math/generalized_inner_product.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class generalized_inner_product(Operator):
- """Computes a general notion of inner product between two fields of
+ r"""Computes a general notion of inner product between two fields of
possibly different dimensionality.
+
Parameters
----------
- fieldA : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,9 +64,10 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes a general notion of inner product between two fields of
- possibly different dimensionality."""
+ def _spec() -> Specification:
+ description = r"""Computes a general notion of inner product between two fields of
+possibly different dimensionality.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,8 +80,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
@@ -88,8 +91,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -97,14 +99,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -113,29 +115,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="generalized_inner_product", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGeneralizedInnerProduct:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGeneralizedInnerProduct
+ inputs:
+ An instance of InputsGeneralizedInnerProduct.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGeneralizedInnerProduct:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGeneralizedInnerProduct
+ outputs:
+ An instance of OutputsGeneralizedInnerProduct.
"""
return super().outputs
@@ -162,15 +171,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -183,15 +192,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,18 +231,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.generalized_inner_product()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
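For illustration, a minimal sketch assuming field_a and field_b are existing dpf.Field objects with matching scopings (placeholder names):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.generalized_inner_product(fieldA=field_a, fieldB=field_b)
>>> inner = op.outputs.field()   # entity-wise inner products, e.g. a scalar field from two vector fields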
diff --git a/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py b/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py
index 70203958751..22f57849085 100644
--- a/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py
+++ b/src/ansys/dpf/core/operators/math/generalized_inner_product_fc.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class generalized_inner_product_fc(Operator):
- """Computes a general notion of inner product between two fields of
+ r"""Computes a general notion of inner product between two fields of
possibly different dimensionality.
+
Parameters
----------
- field_or_fields_container_A : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- field_or_fields_container_B : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field_or_fields_container_A: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ field_or_fields_container_B: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -69,9 +72,10 @@ def __init__(
self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B)
@staticmethod
- def _spec():
- description = """Computes a general notion of inner product between two fields of
- possibly different dimensionality."""
+ def _spec() -> Specification:
+ description = r"""Computes a general notion of inner product between two fields of
+possibly different dimensionality.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -84,8 +88,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="field_or_fields_container_B",
@@ -96,8 +99,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -105,14 +107,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -121,31 +123,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="generalized_inner_product_fc", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsGeneralizedInnerProductFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGeneralizedInnerProductFc
+ inputs:
+ An instance of InputsGeneralizedInnerProductFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGeneralizedInnerProductFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGeneralizedInnerProductFc
+ outputs:
+ An instance of OutputsGeneralizedInnerProductFc.
"""
return super().outputs
@@ -176,15 +185,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_or_fields_container_B)
@property
- def field_or_fields_container_A(self):
- """Allows to connect field_or_fields_container_A input to the operator.
+ def field_or_fields_container_A(self) -> Input:
+ r"""Allows to connect field_or_fields_container_A input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_A : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -197,15 +206,15 @@ def field_or_fields_container_A(self):
return self._field_or_fields_container_A
@property
- def field_or_fields_container_B(self):
- """Allows to connect field_or_fields_container_B input to the operator.
+ def field_or_fields_container_B(self) -> Input:
+ r"""Allows to connect field_or_fields_container_B input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_B : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,18 +247,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.generalized_inner_product_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
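The fields-container variant follows the same pattern; a sketch assuming fc_a and fc_b are existing FieldsContainers (illustrative names):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.generalized_inner_product_fc()
>>> op.inputs.field_or_fields_container_A.connect(fc_a)
>>> op.inputs.field_or_fields_container_B.connect(fc_b)
>>> inner_fc = op.outputs.fields_container()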
diff --git a/src/ansys/dpf/core/operators/math/img_part.py b/src/ansys/dpf/core/operators/math/img_part.py
index 29a7f4a1a2e..32e43818778 100644
--- a/src/ansys/dpf/core/operators/math/img_part.py
+++ b/src/ansys/dpf/core/operators/math/img_part.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class img_part(Operator):
- """Extracts element-wise imaginary part of field containers containing
+ r"""Extracts element-wise imaginary part of field containers containing
complex fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Extracts element-wise imaginary part of field containers containing
- complex fields."""
+ def _spec() -> Specification:
+ description = r"""Extracts element-wise imaginary part of field containers containing
+complex fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="img_part", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsImgPart:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsImgPart
+ inputs:
+ An instance of InputsImgPart.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsImgPart:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsImgPart
+ outputs:
+ An instance of OutputsImgPart.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.img_part()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
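A brief sketch, assuming complex_fc is a FieldsContainer of complex fields, for example from a harmonic analysis (the variable name is illustrative):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.img_part(fields_container=complex_fc)
>>> imag_fc = op.outputs.fields_container()   # imaginary part, component-wise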
diff --git a/src/ansys/dpf/core/operators/math/invert.py b/src/ansys/dpf/core/operators/math/invert.py
index 6ede1d20390..d8f011ed7ab 100644
--- a/src/ansys/dpf/core/operators/math/invert.py
+++ b/src/ansys/dpf/core/operators/math/invert.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class invert(Operator):
- """DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
+ r"""DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
component-wise inverse of a field (1./x).
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -53,9 +57,10 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
- component-wise inverse of a field (1./x)."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
+component-wise inverse of a field (1./x).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,8 +68,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -72,14 +76,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +92,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invert", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsInvert:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsInvert
+ inputs:
+ An instance of InputsInvert.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsInvert:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsInvert
+ outputs:
+ An instance of OutputsInvert.
"""
return super().outputs
@@ -133,15 +144,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,18 +183,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.invert()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
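Because the operator is deprecated in favor of division, existing code typically looks like the sketch below (my_field is a placeholder dpf.Field); new code should prefer the divide operators:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.invert(field=my_field)
>>> inverse_field = op.outputs.field()   # element-wise 1./x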
diff --git a/src/ansys/dpf/core/operators/math/invert_fc.py b/src/ansys/dpf/core/operators/math/invert_fc.py
index 864f65f83cc..03bbb09c0ff 100644
--- a/src/ansys/dpf/core/operators/math/invert_fc.py
+++ b/src/ansys/dpf/core/operators/math/invert_fc.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class invert_fc(Operator):
- """DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
+ r"""DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
component-wise inverse of a field (1./x).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -53,9 +57,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
- component-wise inverse of a field (1./x)."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE DIVIDE. Computes the element-wise and
+component-wise inverse of a field (1./x).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,8 +68,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -72,14 +76,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +92,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="invert_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsInvertFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsInvertFc
+ inputs:
+ An instance of InputsInvertFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsInvertFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsInvertFc
+ outputs:
+ An instance of OutputsInvertFc.
"""
return super().outputs
@@ -133,15 +144,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,18 +183,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.invert_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
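The fields-container form is analogous; a sketch with my_fc as a placeholder FieldsContainer (again, prefer the divide operators in new code):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.invert_fc(fields_container=my_fc)
>>> inverse_fc = op.outputs.fields_container()   # element-wise 1./x per field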
diff --git a/src/ansys/dpf/core/operators/math/kronecker_prod.py b/src/ansys/dpf/core/operators/math/kronecker_prod.py
index 52a7c7ef4f2..8ee90135ab0 100644
--- a/src/ansys/dpf/core/operators/math/kronecker_prod.py
+++ b/src/ansys/dpf/core/operators/math/kronecker_prod.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class kronecker_prod(Operator):
- """Computes element-wise Kronecker product between two tensor fields.
+ r"""Computes element-wise Kronecker product between two tensor fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -60,10 +63,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = (
- """Computes element-wise Kronecker product between two tensor fields."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes element-wise Kronecker product between two tensor fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,15 +73,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -87,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="kronecker_prod", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsKroneckerProd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsKroneckerProd
+ inputs:
+ An instance of InputsKroneckerProd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsKroneckerProd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsKroneckerProd
+ outputs:
+ An instance of OutputsKroneckerProd.
"""
return super().outputs
@@ -152,15 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +180,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.kronecker_prod()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
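A short sketch, assuming tensor_a and tensor_b are existing tensor dpf.Field objects (illustrative names):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.kronecker_prod(fieldA=tensor_a, fieldB=tensor_b)
>>> kron_field = op.outputs.field()   # element-wise Kronecker product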
diff --git a/src/ansys/dpf/core/operators/math/linear_combination.py b/src/ansys/dpf/core/operators/math/linear_combination.py
index 67e8d1a5c8f..a061a7e9221 100644
--- a/src/ansys/dpf/core/operators/math/linear_combination.py
+++ b/src/ansys/dpf/core/operators/math/linear_combination.py
@@ -4,30 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class linear_combination(Operator):
- """Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in
- 1,2,4) are complex numbers.
+ r"""Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in 1,2,4)
+ are complex numbers.
+
Parameters
----------
- a : float
+ a: float
Double
- fields_containerA : FieldsContainer
- fields_containerB : FieldsContainer
- b : float
+ fields_containerA: FieldsContainer
+ fields_containerB: FieldsContainer
+ b: float
Double
- fields_containerC : FieldsContainer
+ fields_containerC: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -86,9 +91,10 @@ def __init__(
self.inputs.fields_containerC.connect(fields_containerC)
@staticmethod
- def _spec():
- description = """Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in
- 1,2,4) are complex numbers."""
+ def _spec() -> Specification:
+ description = r"""Computes aXY + bZ where a,b (in 0, in 3) are scalar and X,Y,Z (in 1,2,4)
+are complex numbers.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -96,31 +102,31 @@ def _spec():
name="a",
type_names=["double"],
optional=False,
- document="""Double""",
+ document=r"""Double""",
),
1: PinSpecification(
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="b",
type_names=["double"],
optional=False,
- document="""Double""",
+ document=r"""Double""",
),
4: PinSpecification(
name="fields_containerC",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -128,14 +134,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -144,29 +150,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="CplxOp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsLinearCombination:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsLinearCombination
+ inputs:
+ An instance of InputsLinearCombination.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsLinearCombination:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsLinearCombination
+ outputs:
+ An instance of OutputsLinearCombination.
"""
return super().outputs
@@ -211,14 +224,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerC)
@property
- def a(self):
- """Allows to connect a input to the operator.
+ def a(self) -> Input:
+ r"""Allows to connect a input to the operator.
Double
- Parameters
- ----------
- my_a : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,12 +245,13 @@ def a(self):
return self._a
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,12 +264,13 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,14 +283,15 @@ def fields_containerB(self):
return self._fields_containerB
@property
- def b(self):
- """Allows to connect b input to the operator.
+ def b(self) -> Input:
+ r"""Allows to connect b input to the operator.
Double
- Parameters
- ----------
- my_b : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -287,12 +304,13 @@ def b(self):
return self._b
@property
- def fields_containerC(self):
- """Allows to connect fields_containerC input to the operator.
+ def fields_containerC(self) -> Input:
+ r"""Allows to connect fields_containerC input to the operator.
- Parameters
- ----------
- my_fields_containerC : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,18 +341,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.linear_combination()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
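To make the aXY + bZ formula concrete: with a=2.0 and b=-1.0 the operator returns 2*X*Y - Z, element-wise on the complex fields. A sketch with illustrative placeholders fc_x, fc_y, fc_z:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.linear_combination(
...     a=2.0,
...     fields_containerA=fc_x,
...     fields_containerB=fc_y,
...     b=-1.0,
...     fields_containerC=fc_z,
... )
>>> result_fc = op.outputs.fields_container()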
diff --git a/src/ansys/dpf/core/operators/math/ln.py b/src/ansys/dpf/core/operators/math/ln.py
index b1ef0a08379..934005d420f 100644
--- a/src/ansys/dpf/core/operators/math/ln.py
+++ b/src/ansys/dpf/core/operators/math/ln.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class ln(Operator):
- """Computes element-wise ln(field[i]).
+ r"""Computes element-wise ln(field[i]).
+
Parameters
----------
- field : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise ln(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise ln(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -66,8 +71,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -75,14 +79,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -91,29 +95,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ln", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsLn:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsLn
+ inputs:
+ An instance of InputsLn.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsLn:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsLn
+ outputs:
+ An instance of OutputsLn.
"""
return super().outputs
@@ -136,15 +147,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -175,18 +186,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.ln()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
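A minimal sketch (my_field is a placeholder); the output keeps the input layout with ln applied to every component:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.ln(field=my_field)
>>> ln_field = op.outputs.field()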
diff --git a/src/ansys/dpf/core/operators/math/ln_fc.py b/src/ansys/dpf/core/operators/math/ln_fc.py
index 00c7e80903b..24bfab6a81a 100644
--- a/src/ansys/dpf/core/operators/math/ln_fc.py
+++ b/src/ansys/dpf/core/operators/math/ln_fc.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class ln_fc(Operator):
- """Computes element-wise ln(field[i]).
+ r"""Computes element-wise ln(field[i]).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise ln(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise ln(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ln_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsLnFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsLnFc
+ inputs:
+ An instance of InputsLnFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsLnFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsLnFc
+ outputs:
+ An instance of OutputsLnFc.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.ln_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
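The fields-container variant, sketched with a placeholder my_fc:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.ln_fc(fields_container=my_fc)
>>> ln_result_fc = op.outputs.fields_container()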
diff --git a/src/ansys/dpf/core/operators/math/mac.py b/src/ansys/dpf/core/operators/math/mac.py
index 7216b8a456d..9b1e2bb4e5b 100644
--- a/src/ansys/dpf/core/operators/math/mac.py
+++ b/src/ansys/dpf/core/operators/math/mac.py
@@ -4,36 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mac(Operator):
- """Computes MAC Matrix between two fields container, both for real and
- complex cases. For mixed cases (real-complex and complex) only the
- real part is considered. Providing inputs with the same component
- scoping is an user responsability.
+ r"""Computes MAC Matrix between two fields container, both for real and
+ complex cases. For mixed cases (real-complex and complex) only the real
+ part is considered. Providing inputs with the same component scoping is
+ an user responsability.
+
Parameters
----------
- fields_containerA : FieldsContainer
- Fields container a.
- fields_containerB : FieldsContainer
- Fields container b.
- ponderation : Field
- Field m, optional weighting for mac matrix
- computation.
+ fields_containerA: FieldsContainer
+ Fields Container A.
+ fields_containerB: FieldsContainer
+ Fields Container B.
+ weights: Field
+ Field M, optional weighting for MAC Matrix computation.
Returns
-------
- field : Field
- Mac matrix for all the combinations between
- mode fields of field container a and
- field container b. results listed row
- by row.
+ field: Field
+ MAC Matrix for all the combinations between mode fields of Field Container A and Field Container B. Results listed row by row.
Examples
--------
@@ -47,14 +48,14 @@ class mac(Operator):
>>> op.inputs.fields_containerA.connect(my_fields_containerA)
>>> my_fields_containerB = dpf.FieldsContainer()
>>> op.inputs.fields_containerB.connect(my_fields_containerB)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.mac(
... fields_containerA=my_fields_containerA,
... fields_containerB=my_fields_containerB,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... )
>>> # Get output data
@@ -65,9 +66,10 @@ def __init__(
self,
fields_containerA=None,
fields_containerB=None,
- ponderation=None,
+ weights=None,
config=None,
server=None,
+ ponderation=None,
):
super().__init__(name="mac", config=config, server=server)
self._inputs = InputsMac(self)
@@ -76,15 +78,18 @@ def __init__(
self.inputs.fields_containerA.connect(fields_containerA)
if fields_containerB is not None:
self.inputs.fields_containerB.connect(fields_containerB)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
@staticmethod
- def _spec():
- description = """Computes MAC Matrix between two fields container, both for real and
- complex cases. For mixed cases (real-complex and complex)
- only the real part is considered. Providing inputs with
- the same component scoping is an user responsability."""
+ def _spec() -> Specification:
+ description = r"""Computes MAC Matrix between two fields containers, both for real and
+complex cases. For mixed cases (real-complex and complex) only the real
+part is considered. Providing inputs with the same component scoping is
+a user responsibility.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -92,20 +97,20 @@ def _spec():
name="fields_containerA",
type_names=["fields_container"],
optional=False,
- document="""Fields container a.""",
+ document=r"""Fields Container A.""",
),
1: PinSpecification(
name="fields_containerB",
type_names=["fields_container"],
optional=False,
- document="""Fields container b.""",
+ document=r"""Fields Container B.""",
),
2: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["field"],
optional=False,
- document="""Field m, optional weighting for mac matrix
- computation.""",
+ document=r"""Field M, optional weighting for MAC Matrix computation.""",
+ aliases=["ponderation"],
),
},
map_output_pin_spec={
@@ -113,17 +118,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Mac matrix for all the combinations between
- mode fields of field container a and
- field container b. results listed row
- by row.""",
+ document=r"""MAC Matrix for all the combinations between mode fields of Field Container A and Field Container B. Results listed row by row.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +134,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mac", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMac:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMac
+ inputs:
+ An instance of InputsMac.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMac:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMac
+ outputs:
+ An instance of OutputsMac.
"""
return super().outputs
@@ -171,8 +180,8 @@ class InputsMac(_Inputs):
>>> op.inputs.fields_containerA.connect(my_fields_containerA)
>>> my_fields_containerB = dpf.FieldsContainer()
>>> op.inputs.fields_containerB.connect(my_fields_containerB)
- >>> my_ponderation = dpf.Field()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = dpf.Field()
+ >>> op.inputs.weights.connect(my_weights)
"""
def __init__(self, op: Operator):
@@ -181,18 +190,19 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_containerA)
self._fields_containerB = Input(mac._spec().input_pin(1), 1, op, -1)
self._inputs.append(self._fields_containerB)
- self._ponderation = Input(mac._spec().input_pin(2), 2, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(mac._spec().input_pin(2), 2, op, -1)
+ self._inputs.append(self._weights)
@property
- def fields_containerA(self):
- """Allows to connect fields_containerA input to the operator.
+ def fields_containerA(self) -> Input:
+ r"""Allows to connect fields_containerA input to the operator.
- Fields container a.
+ Fields Container A.
- Parameters
- ----------
- my_fields_containerA : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,14 +215,15 @@ def fields_containerA(self):
return self._fields_containerA
@property
- def fields_containerB(self):
- """Allows to connect fields_containerB input to the operator.
+ def fields_containerB(self) -> Input:
+ r"""Allows to connect fields_containerB input to the operator.
- Fields container b.
+ Fields Container B.
- Parameters
- ----------
- my_fields_containerB : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,25 +236,37 @@ def fields_containerB(self):
return self._fields_containerB
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Field m, optional weighting for mac matrix
- computation.
+ Field M, optional weighting for MAC Matrix computation.
- Parameters
- ----------
- my_ponderation : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.mac()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
+
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator mac: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
class OutputsMac(_Outputs):
@@ -264,18 +287,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ MAC Matrix for all the combinations between mode fields of Field Container A and Field Container B. Results listed row by row.
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.mac()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
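Note: the ponderation -> weights rename above stays backward compatible in two places shown in this hunk: the constructor still accepts the old ponderation keyword and forwards it to the weights pin, and InputsMac.__getattr__ resolves the old attribute name with a DeprecationWarning. A sketch of both paths (my_weights is a placeholder Field):

>>> from ansys.dpf import core as dpf
>>> my_weights = dpf.Field()
>>> op = dpf.operators.math.mac(weights=my_weights)       # preferred new keyword
>>> op = dpf.operators.math.mac(ponderation=my_weights)   # old keyword, forwarded to the weights pin
>>> op.inputs.ponderation.connect(my_weights)             # warns, then behaves like op.inputs.weights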
diff --git a/src/ansys/dpf/core/operators/math/make_one_on_comp.py b/src/ansys/dpf/core/operators/math/make_one_on_comp.py
index 75b857748f8..7854d1d8746 100644
--- a/src/ansys/dpf/core/operators/math/make_one_on_comp.py
+++ b/src/ansys/dpf/core/operators/math/make_one_on_comp.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class make_one_on_comp(Operator):
- """Takes the input field's scoping and creates a field full of zeros,
+ r"""Takes the input field’s scoping and creates a field full of zeros,
except for the indexes from pin 1 that will hold 1.0.
+
Parameters
----------
- fieldA : Field
- scalar_int : int
+ fieldA: Field
+ scalar_int: int
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -57,9 +62,10 @@ def __init__(self, fieldA=None, scalar_int=None, config=None, server=None):
self.inputs.scalar_int.connect(scalar_int)
@staticmethod
- def _spec():
- description = """Takes the input field's scoping and creates a field full of zeros,
- except for the indexes from pin 1 that will hold 1.0."""
+ def _spec() -> Specification:
+ description = r"""Takes the input field’s scoping and creates a field full of zeros,
+except for the indexes from pin 1 that will hold 1.0.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -67,13 +73,13 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scalar_int",
type_names=["int32"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -81,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -97,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="make_one_on_comp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMakeOneOnComp:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMakeOneOnComp
+ inputs:
+ An instance of InputsMakeOneOnComp.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMakeOneOnComp:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMakeOneOnComp
+ outputs:
+ An instance of OutputsMakeOneOnComp.
"""
return super().outputs
@@ -146,12 +159,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scalar_int)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,12 +178,13 @@ def fieldA(self):
return self._fieldA
@property
- def scalar_int(self):
- """Allows to connect scalar_int input to the operator.
+ def scalar_int(self) -> Input:
+ r"""Allows to connect scalar_int input to the operator.
- Parameters
- ----------
- my_scalar_int : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,18 +215,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.make_one_on_comp()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
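Note: make_one_on_comp keeps its two pins; as the regenerated description says, the output reuses fieldA's scoping and is zero everywhere except at the index given by scalar_int, which holds 1.0. A minimal sketch (my_fieldA and the index 3 are arbitrary placeholders):

>>> from ansys.dpf import core as dpf
>>> my_fieldA = dpf.Field()
>>> op = dpf.operators.math.make_one_on_comp(fieldA=my_fieldA, scalar_int=3)
>>> result_field = op.outputs.field()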
diff --git a/src/ansys/dpf/core/operators/math/matrix_inverse.py b/src/ansys/dpf/core/operators/math/matrix_inverse.py
index 1a7b15d7eb8..4e3ee2da731 100644
--- a/src/ansys/dpf/core/operators/math/matrix_inverse.py
+++ b/src/ansys/dpf/core/operators/math/matrix_inverse.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class matrix_inverse(Operator):
- """Computes the complex matrix inverse for each field in the given fields
+ r"""Computes the complex matrix inverse for each field in the given fields
container.
+
Parameters
----------
- fields_container : FieldsContainer
- Fields_container
+ fields_container: FieldsContainer
+ fields_container
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,9 +57,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the complex matrix inverse for each field in the given fields
- container."""
+ def _spec() -> Specification:
+ description = r"""Computes the complex matrix inverse for each field in the given fields
+container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -62,7 +68,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
},
map_output_pin_spec={
@@ -70,14 +76,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +92,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="inverseOp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMatrixInverse:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMatrixInverse
+ inputs:
+ An instance of InputsMatrixInverse.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMatrixInverse:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMatrixInverse
+ outputs:
+ An instance of OutputsMatrixInverse.
"""
return super().outputs
@@ -131,14 +144,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -169,18 +183,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.matrix_inverse()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/min_max_over_time.py b/src/ansys/dpf/core/operators/math/min_max_over_time.py
index 3423f624dd1..3d2210045ac 100644
--- a/src/ansys/dpf/core/operators/math/min_max_over_time.py
+++ b/src/ansys/dpf/core/operators/math/min_max_over_time.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_over_time(Operator):
- """Evaluates minimum/maximum over time/frequency.
+ r"""Evaluates minimum/maximum over time/frequency.
+
Parameters
----------
- fields_container : FieldsContainer
- int32 : int
+ fields_container: FieldsContainer
+ int32: int
Define min or max.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -59,8 +64,9 @@ def __init__(self, fields_container=None, int32=None, config=None, server=None):
self.inputs.int32.connect(int32)
@staticmethod
- def _spec():
- description = """Evaluates minimum/maximum over time/frequency."""
+ def _spec() -> Specification:
+ description = r"""Evaluates minimum/maximum over time/frequency.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -68,13 +74,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="int32",
type_names=["int32"],
optional=False,
- document="""Define min or max.""",
+ document=r"""Define min or max.""",
),
},
map_output_pin_spec={
@@ -82,14 +88,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -98,31 +104,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="mechanical::min_max_over_time", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxOverTime:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxOverTime
+ inputs:
+ An instance of InputsMinMaxOverTime.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxOverTime:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxOverTime
+ outputs:
+ An instance of OutputsMinMaxOverTime.
"""
return super().outputs
@@ -151,12 +164,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._int32)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -169,14 +183,15 @@ def fields_container(self):
return self._fields_container
@property
- def int32(self):
- """Allows to connect int32 input to the operator.
+ def int32(self) -> Input:
+ r"""Allows to connect int32 input to the operator.
Define min or max.
- Parameters
- ----------
- my_int32 : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,18 +222,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.min_max_over_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
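Note: min_max_over_time is functionally unchanged; its selector pin sits at index 5 (not 1), and its internal name is mechanical::min_max_over_time, as default_config above shows. A minimal sketch; the integer connected to int32 chooses between min and max, but the value-to-choice mapping comes from the operator spec and is not shown in this diff:

>>> from ansys.dpf import core as dpf
>>> my_fields_container = dpf.FieldsContainer()
>>> my_int32 = 0  # placeholder; the actual min/max convention is defined by the operator spec
>>> op = dpf.operators.math.min_max_over_time()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> op.inputs.int32.connect(my_int32)
>>> result_fields_container = op.outputs.fields_container()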
diff --git a/src/ansys/dpf/core/operators/math/minus.py b/src/ansys/dpf/core/operators/math/minus.py
index 35e2e6fe2fe..987e79fd394 100644
--- a/src/ansys/dpf/core/operators/math/minus.py
+++ b/src/ansys/dpf/core/operators/math/minus.py
@@ -4,31 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class minus(Operator):
- """Computes the difference of two fields. If one field's scoping has an
- 'overall' location, then this field's values are applied on the
- other field entirely. When using a constant or 'work_by_index',
- you can use 'inplace' to reuse one of the fields.
+ r"""Computes the difference of two fields. If one field’s scoping has an
+ ‘overall’ location, then this field’s values are applied on the other
+ field entirely. When using a constant or ‘work_by_index’, you can use
+ ‘inplace’ to reuse one of the fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -63,12 +66,12 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes the difference of two fields. If one field's scoping has an
- 'overall' location, then this field's values are applied
- on the other field entirely. When using a constant or
- 'work_by_index', you can use 'inplace' to reuse one of the
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes the difference of two fields. If one field’s scoping has an
+‘overall’ location, then this field’s values are applied on the other
+field entirely. When using a constant or ‘work_by_index’, you can use
+‘inplace’ to reuse one of the fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,8 +84,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
@@ -93,8 +95,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -102,14 +103,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -118,29 +119,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="minus", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinus:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinus
+ inputs:
+ An instance of InputsMinus.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinus:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinus
+ outputs:
+ An instance of OutputsMinus.
"""
return super().outputs
@@ -167,15 +175,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -188,15 +196,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,18 +235,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.minus()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
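Note: because both minus pins accept a plain float (see the "Field or FieldsContainer or float" signature above), subtracting a constant needs no Field wrapper. A minimal sketch, with my_field as a placeholder minuend and 1.0 an arbitrary constant:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()
>>> op = dpf.operators.math.minus(fieldA=my_field, fieldB=1.0)
>>> result_field = op.outputs.field()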
diff --git a/src/ansys/dpf/core/operators/math/minus_fc.py b/src/ansys/dpf/core/operators/math/minus_fc.py
index 4a900033cf1..80b04069604 100644
--- a/src/ansys/dpf/core/operators/math/minus_fc.py
+++ b/src/ansys/dpf/core/operators/math/minus_fc.py
@@ -4,31 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class minus_fc(Operator):
- """Computes the difference of two fields. If one field's scoping has an
- 'overall' location, then this field's values are applied on the
- other field entirely. When using a constant or 'work_by_index',
- you can use 'inplace' to reuse one of the fields.
+ r"""Computes the difference of two fields. If one field’s scoping has an
+ ‘overall’ location, then this field’s values are applied on the other
+ field entirely. When using a constant or ‘work_by_index’, you can use
+ ‘inplace’ to reuse one of the fields.
+
Parameters
----------
- field_or_fields_container_A : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- field_or_fields_container_B : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ field_or_fields_container_A: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ field_or_fields_container_B: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -69,12 +72,12 @@ def __init__(
self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B)
@staticmethod
- def _spec():
- description = """Computes the difference of two fields. If one field's scoping has an
- 'overall' location, then this field's values are applied
- on the other field entirely. When using a constant or
- 'work_by_index', you can use 'inplace' to reuse one of the
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes the difference of two fields. If one field’s scoping has an
+‘overall’ location, then this field’s values are applied on the other
+field entirely. When using a constant or ‘work_by_index’, you can use
+‘inplace’ to reuse one of the fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -87,8 +90,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="field_or_fields_container_B",
@@ -99,8 +101,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -108,14 +109,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -124,29 +125,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="minus_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinusFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinusFc
+ inputs:
+ An instance of InputsMinusFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinusFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinusFc
+ outputs:
+ An instance of OutputsMinusFc.
"""
return super().outputs
@@ -177,15 +185,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_or_fields_container_B)
@property
- def field_or_fields_container_A(self):
- """Allows to connect field_or_fields_container_A input to the operator.
+ def field_or_fields_container_A(self) -> Input:
+ r"""Allows to connect field_or_fields_container_A input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_A : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,15 +206,15 @@ def field_or_fields_container_A(self):
return self._field_or_fields_container_A
@property
- def field_or_fields_container_B(self):
- """Allows to connect field_or_fields_container_B input to the operator.
+ def field_or_fields_container_B(self) -> Input:
+ r"""Allows to connect field_or_fields_container_B input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_B : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -237,18 +245,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.minus_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/modal_damping_ratio.py b/src/ansys/dpf/core/operators/math/modal_damping_ratio.py
index 2edccf6c967..eef98aa1a89 100644
--- a/src/ansys/dpf/core/operators/math/modal_damping_ratio.py
+++ b/src/ansys/dpf/core/operators/math/modal_damping_ratio.py
@@ -4,34 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class modal_damping_ratio(Operator):
- """Computes damping ratio for each mode shape as X_i = const + ratio_i +
- m_coefficient / (2*omega_i) + k_coefficient * omega_i/2.
+ r"""Computes damping ratio for each mode shape as X_i = const + ratio_i +
+ m_coefficient / (2\ *omega_i) + k_coefficient* omega_i/2.
+
Parameters
----------
- natural_freq :
- Input vector expects natural frequencies.
- const_ratio : float, optional
- Constant modal damping ratio
- ratio_by_modes : optional
- Modal damping ratio for each mode shape
- m_coefficient : float
- Global mass matrix multiplier
- k_coefficient : float
- Global stiffness matrix multiplier
+ natural_freq:
+ input vector expects natural frequencies.
+ const_ratio: float, optional
+ constant modal damping ratio
+ ratio_by_modes: optional
+ modal damping ratio for each mode shape
+ m_coefficient: float
+ global mass matrix multiplier
+ k_coefficient: float
+ global stiffness matrix multiplier
Returns
-------
- field : Field
- Field of modal damping ratio.
+ field: Field
+ field of modal damping ratio.
Examples
--------
@@ -90,9 +95,10 @@ def __init__(
self.inputs.k_coefficient.connect(k_coefficient)
@staticmethod
- def _spec():
- description = """Computes damping ratio for each mode shape as X_i = const + ratio_i +
- m_coefficient / (2*omega_i) + k_coefficient * omega_i/2."""
+ def _spec() -> Specification:
+ description = r"""Computes damping ratio for each mode shape as X_i = const + ratio_i +
+m_coefficient / (2\ *omega_i) + k_coefficient* omega_i/2.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -100,31 +106,31 @@ def _spec():
name="natural_freq",
type_names=["vector"],
optional=False,
- document="""Input vector expects natural frequencies.""",
+ document=r"""input vector expects natural frequencies.""",
),
1: PinSpecification(
name="const_ratio",
type_names=["double"],
optional=True,
- document="""Constant modal damping ratio""",
+ document=r"""constant modal damping ratio""",
),
2: PinSpecification(
name="ratio_by_modes",
type_names=["vector"],
optional=True,
- document="""Modal damping ratio for each mode shape""",
+ document=r"""modal damping ratio for each mode shape""",
),
3: PinSpecification(
name="m_coefficient",
type_names=["double"],
optional=False,
- document="""Global mass matrix multiplier""",
+ document=r"""global mass matrix multiplier""",
),
4: PinSpecification(
name="k_coefficient",
type_names=["double"],
optional=False,
- document="""Global stiffness matrix multiplier""",
+ document=r"""global stiffness matrix multiplier""",
),
},
map_output_pin_spec={
@@ -132,14 +138,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field of modal damping ratio.""",
+ document=r"""field of modal damping ratio.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -148,29 +154,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="modal_damping_ratio", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsModalDampingRatio:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsModalDampingRatio
+ inputs:
+ An instance of InputsModalDampingRatio.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsModalDampingRatio:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsModalDampingRatio
+ outputs:
+ An instance of OutputsModalDampingRatio.
"""
return super().outputs
@@ -211,14 +224,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._k_coefficient)
@property
- def natural_freq(self):
- """Allows to connect natural_freq input to the operator.
+ def natural_freq(self) -> Input:
+ r"""Allows to connect natural_freq input to the operator.
- Input vector expects natural frequencies.
+ input vector expects natural frequencies.
- Parameters
- ----------
- my_natural_freq :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,14 +245,15 @@ def natural_freq(self):
return self._natural_freq
@property
- def const_ratio(self):
- """Allows to connect const_ratio input to the operator.
+ def const_ratio(self) -> Input:
+ r"""Allows to connect const_ratio input to the operator.
- Constant modal damping ratio
+ constant modal damping ratio
- Parameters
- ----------
- my_const_ratio : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,14 +266,15 @@ def const_ratio(self):
return self._const_ratio
@property
- def ratio_by_modes(self):
- """Allows to connect ratio_by_modes input to the operator.
+ def ratio_by_modes(self) -> Input:
+ r"""Allows to connect ratio_by_modes input to the operator.
- Modal damping ratio for each mode shape
+ modal damping ratio for each mode shape
- Parameters
- ----------
- my_ratio_by_modes :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -271,14 +287,15 @@ def ratio_by_modes(self):
return self._ratio_by_modes
@property
- def m_coefficient(self):
- """Allows to connect m_coefficient input to the operator.
+ def m_coefficient(self) -> Input:
+ r"""Allows to connect m_coefficient input to the operator.
- Global mass matrix multiplier
+ global mass matrix multiplier
- Parameters
- ----------
- my_m_coefficient : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -291,14 +308,15 @@ def m_coefficient(self):
return self._m_coefficient
@property
- def k_coefficient(self):
- """Allows to connect k_coefficient input to the operator.
+ def k_coefficient(self) -> Input:
+ r"""Allows to connect k_coefficient input to the operator.
- Global stiffness matrix multiplier
+ global stiffness matrix multiplier
- Parameters
- ----------
- my_k_coefficient : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -329,18 +347,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ field of modal damping ratio.
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.modal_damping_ratio()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
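Note: the backslash and asterisk shuffling in the regenerated modal_damping_ratio description appears to be docstring escaping only; the damping model is the one the removed lines spell out, per mode i:

    X_i = const_ratio + ratio_by_modes[i] + m_coefficient / (2 * omega_i) + k_coefficient * omega_i / 2

with omega_i derived from the natural_freq input (pin 0). A plain-Python restatement as a sketch (placeholder numbers; the operator evaluates this server-side and returns a Field):

>>> const, m_coeff, k_coeff = 0.01, 0.5, 1e-4
>>> ratio_by_modes = [0.0, 0.02]
>>> omegas = [10.0, 25.0]
>>> damping = [const + r + m_coeff / (2 * w) + k_coeff * w / 2
...            for r, w in zip(ratio_by_modes, omegas)]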
diff --git a/src/ansys/dpf/core/operators/math/modal_superposition.py b/src/ansys/dpf/core/operators/math/modal_superposition.py
index e690a8c520c..bc24c9565bb 100644
--- a/src/ansys/dpf/core/operators/math/modal_superposition.py
+++ b/src/ansys/dpf/core/operators/math/modal_superposition.py
@@ -4,42 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class modal_superposition(Operator):
- """Computes the solution in the time/frequency space from a modal
- solution by multiplying a modal basis (in 0) by the solution in
- this modal space (coefficients for each mode for each
- time/frequency) (in 1).
+ r"""Computes the solution in the time/frequency space from a modal solution
+ by multiplying a modal basis (in 0) by the solution in this modal space
+ (coefficients for each mode for each time/frequency) (in 1).
+
Parameters
----------
- modal_basis : FieldsContainer
- One field by mode with each field
- representing a mode shape on nodes or
- elements.
- solution_in_modal_space : FieldsContainer
- One field by time/frequency with each field
- having a ponderating coefficient for
- each mode of the modal_basis pin.
- time_scoping : Scoping, optional
- Compute the result on a subset of the time
- frequency domain defined in the
- solution_in_modal_space fields
- container.
- mesh_scoping : Scoping or ScopingsContainer, optional
- Compute the result on a subset of the space
- domain defined in the modal_basis
- fields container.
+ modal_basis: FieldsContainer
+ One field by mode with each field representing a mode shape on nodes or elements.
+ solution_in_modal_space: FieldsContainer
+ One field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin.
+ time_scoping: Scoping, optional
+ Compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container.
+ mesh_scoping: Scoping or ScopingsContainer, optional
+ Compute the result on a subset of the space domain defined in the modal_basis fields container.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -94,11 +89,11 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = """Computes the solution in the time/frequency space from a modal
- solution by multiplying a modal basis (in 0) by the
- solution in this modal space (coefficients for each mode
- for each time/frequency) (in 1)."""
+ def _spec() -> Specification:
+ description = r"""Computes the solution in the time/frequency space from a modal solution
+by multiplying a modal basis (in 0) by the solution in this modal space
+(coefficients for each mode for each time/frequency) (in 1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -106,34 +101,25 @@ def _spec():
name="modal_basis",
type_names=["fields_container"],
optional=False,
- document="""One field by mode with each field
- representing a mode shape on nodes or
- elements.""",
+ document=r"""One field by mode with each field representing a mode shape on nodes or elements.""",
),
1: PinSpecification(
name="solution_in_modal_space",
type_names=["fields_container"],
optional=False,
- document="""One field by time/frequency with each field
- having a ponderating coefficient for
- each mode of the modal_basis pin.""",
+ document=r"""One field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin.""",
),
3: PinSpecification(
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""Compute the result on a subset of the time
- frequency domain defined in the
- solution_in_modal_space fields
- container.""",
+ document=r"""Compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container.""",
),
4: PinSpecification(
name="mesh_scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Compute the result on a subset of the space
- domain defined in the modal_basis
- fields container.""",
+ document=r"""Compute the result on a subset of the space domain defined in the modal_basis fields container.""",
),
},
map_output_pin_spec={
@@ -141,14 +127,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -157,31 +143,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="expansion::modal_superposition", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsModalSuperposition:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsModalSuperposition
+ inputs:
+ An instance of InputsModalSuperposition.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsModalSuperposition:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsModalSuperposition
+ outputs:
+ An instance of OutputsModalSuperposition.
"""
return super().outputs
@@ -218,16 +211,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def modal_basis(self):
- """Allows to connect modal_basis input to the operator.
+ def modal_basis(self) -> Input:
+ r"""Allows to connect modal_basis input to the operator.
- One field by mode with each field
- representing a mode shape on nodes or
- elements.
+ One field by mode with each field representing a mode shape on nodes or elements.
- Parameters
- ----------
- my_modal_basis : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -240,16 +232,15 @@ def modal_basis(self):
return self._modal_basis
@property
- def solution_in_modal_space(self):
- """Allows to connect solution_in_modal_space input to the operator.
+ def solution_in_modal_space(self) -> Input:
+ r"""Allows to connect solution_in_modal_space input to the operator.
- One field by time/frequency with each field
- having a ponderating coefficient for
- each mode of the modal_basis pin.
+ One field by time/frequency with each field having a ponderating coefficient for each mode of the modal_basis pin.
- Parameters
- ----------
- my_solution_in_modal_space : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -262,17 +253,15 @@ def solution_in_modal_space(self):
return self._solution_in_modal_space
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Compute the result on a subset of the time
- frequency domain defined in the
- solution_in_modal_space fields
- container.
+ Compute the result on a subset of the time frequency domain defined in the solution_in_modal_space fields container.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -285,16 +274,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Compute the result on a subset of the space
- domain defined in the modal_basis
- fields container.
+ Compute the result on a subset of the space domain defined in the modal_basis fields container.
- Parameters
- ----------
- my_mesh_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -327,18 +315,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.modal_superposition()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
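Note: modal_superposition's description boils down to a matrix product: for each time or frequency step, the expanded field is the sum over modes of the mode shapes in modal_basis (pin 0) scaled by that step's coefficients from solution_in_modal_space (pin 1), optionally restricted through time_scoping and mesh_scoping. A dense-array sketch of that sum only (placeholder numpy arrays; the operator itself consumes fields containers, not raw arrays):

>>> import numpy as np
>>> phi = np.ones((3, 100))   # n_modes x n_dofs, stand-in for modal_basis
>>> q = np.ones((20, 3))      # n_steps x n_modes, stand-in for solution_in_modal_space
>>> u = q @ phi               # n_steps x n_dofs, one expanded field per step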
diff --git a/src/ansys/dpf/core/operators/math/modulus.py b/src/ansys/dpf/core/operators/math/modulus.py
index ffe6605470f..aca9229abad 100644
--- a/src/ansys/dpf/core/operators/math/modulus.py
+++ b/src/ansys/dpf/core/operators/math/modulus.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class modulus(Operator):
- """Computes element-wise modulus of field containers containing complex
+ r"""Computes element-wise modulus of field containers containing complex
fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise modulus of field containers containing complex
- fields."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise modulus of field containers containing complex
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="modulus", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsModulus:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsModulus
+ inputs:
+ An instance of InputsModulus.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsModulus:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsModulus
+ outputs:
+ An instance of OutputsModulus.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.modulus()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/norm.py b/src/ansys/dpf/core/operators/math/norm.py
index dc1252d78f0..cc3ee2abae2 100644
--- a/src/ansys/dpf/core/operators/math/norm.py
+++ b/src/ansys/dpf/core/operators/math/norm.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class norm(Operator):
- """Computes the element-wise Lp norm of the field elementary data.
- Default Lp=L2
+ r"""Computes the element-wise Lp norm of the field elementary data. Default
+ Lp=L2
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- scalar_int : int, optional
- Lp normalisation type, p = 1, 2, ...n -
- default lp=l2
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ scalar_int: int, optional
+ Lp normalisation type, p = 1, 2, ...n - Default Lp=L2
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -61,9 +64,10 @@ def __init__(self, field=None, scalar_int=None, config=None, server=None):
self.inputs.scalar_int.connect(scalar_int)
@staticmethod
- def _spec():
- description = """Computes the element-wise Lp norm of the field elementary data.
- Default Lp=L2"""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise Lp norm of the field elementary data. Default
+Lp=L2
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,15 +75,13 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="scalar_int",
type_names=["int32"],
optional=True,
- document="""Lp normalisation type, p = 1, 2, ...n -
- default lp=l2""",
+ document=r"""Lp normalisation type, p = 1, 2, ...n - Default Lp=L2""",
),
},
map_output_pin_spec={
@@ -87,14 +89,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="norm", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNorm:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNorm
+ inputs:
+ An instance of InputsNorm.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNorm:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNorm
+ outputs:
+ An instance of OutputsNorm.
"""
return super().outputs
@@ -152,15 +161,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._scalar_int)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +182,15 @@ def field(self):
return self._field
@property
- def scalar_int(self):
- """Allows to connect scalar_int input to the operator.
+ def scalar_int(self) -> Input:
+ r"""Allows to connect scalar_int input to the operator.
- Lp normalisation type, p = 1, 2, ...n -
- default lp=l2
+ Lp normalisation type, p = 1, 2, ...n - Default Lp=L2
- Parameters
- ----------
- my_scalar_int : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +221,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.norm()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
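The optional `scalar_int` pin picks p for the Lp norm (the operator defaults to L2). A minimal sketch, assuming `my_field` is an existing Field (placeholder):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.norm()
>>> op.inputs.field.connect(my_field)  # my_field: assumed existing Field
>>> op.inputs.scalar_int.connect(1)    # request the L1 norm instead of the default L2
>>> l1_field = op.outputs.field()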
diff --git a/src/ansys/dpf/core/operators/math/norm_fc.py b/src/ansys/dpf/core/operators/math/norm_fc.py
index fa014538532..62d726a6420 100644
--- a/src/ansys/dpf/core/operators/math/norm_fc.py
+++ b/src/ansys/dpf/core/operators/math/norm_fc.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class norm_fc(Operator):
- """Computes the element-wise L2 norm of the field elementary data. This
+ r"""Computes the element-wise L2 norm of the field elementary data. This
process is applied on each field of the input fields container.
+
Parameters
----------
- fields_container : FieldsContainer
- scalar_int : int, optional
- Lp normalisation type, p = 1, 2, ...n -
- default lp=2
+ fields_container: FieldsContainer
+ scalar_int: int, optional
+ Lp normalisation type, p = 1, 2, ...n - Default Lp=2
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -61,10 +65,10 @@ def __init__(
self.inputs.scalar_int.connect(scalar_int)
@staticmethod
- def _spec():
- description = """Computes the element-wise L2 norm of the field elementary data. This
- process is applied on each field of the input fields
- container."""
+ def _spec() -> Specification:
+ description = r"""Computes the element-wise L2 norm of the field elementary data. This
+process is applied on each field of the input fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -72,14 +76,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scalar_int",
type_names=["int32"],
optional=True,
- document="""Lp normalisation type, p = 1, 2, ...n -
- default lp=2""",
+ document=r"""Lp normalisation type, p = 1, 2, ...n - Default Lp=2""",
),
},
map_output_pin_spec={
@@ -87,14 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="norm_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNormFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNormFc
+ inputs:
+ An instance of InputsNormFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNormFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNormFc
+ outputs:
+ An instance of OutputsNormFc.
"""
return super().outputs
@@ -152,12 +162,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._scalar_int)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,15 +181,15 @@ def fields_container(self):
return self._fields_container
@property
- def scalar_int(self):
- """Allows to connect scalar_int input to the operator.
+ def scalar_int(self) -> Input:
+ r"""Allows to connect scalar_int input to the operator.
- Lp normalisation type, p = 1, 2, ...n -
- default lp=2
+ Lp normalisation type, p = 1, 2, ...n - Default Lp=2
- Parameters
- ----------
- my_scalar_int : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -209,18 +220,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.norm_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
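Because input pins also accept upstream output pins, norm_fc can be chained directly onto another operator; a sketch, assuming `upstream` is any operator exposing a fields_container output (placeholder):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.norm_fc()
>>> # Feed the operator from an upstream result rather than a concrete container
>>> op.inputs.fields_container.connect(upstream.outputs.fields_container)
>>> norms_fc = op.outputs.fields_container()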
diff --git a/src/ansys/dpf/core/operators/math/outer_product.py b/src/ansys/dpf/core/operators/math/outer_product.py
index 7d008fdf772..e6faf33161a 100644
--- a/src/ansys/dpf/core/operators/math/outer_product.py
+++ b/src/ansys/dpf/core/operators/math/outer_product.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class outer_product(Operator):
- """Computes the outer product of two vector fields.
+ r"""Computes the outer product of two vector fields.
+
Parameters
----------
- fieldA : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -60,8 +63,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes the outer product of two vector fields."""
+ def _spec() -> Specification:
+ description = r"""Computes the outer product of two vector fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -74,8 +78,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
@@ -86,8 +89,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -95,14 +97,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -111,29 +113,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="outer_product", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsOuterProduct:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsOuterProduct
+ inputs:
+ An instance of InputsOuterProduct.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsOuterProduct:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsOuterProduct
+ outputs:
+ An instance of OutputsOuterProduct.
"""
return super().outputs
@@ -160,15 +169,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -181,15 +190,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -220,18 +229,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.outer_product()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
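Both pins take a Field, a FieldsContainer with a single field, or a float; a sketch with placeholder vector fields `vec_a` and `vec_b`:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.outer_product(fieldA=vec_a, fieldB=vec_b)
>>> outer_field = op.outputs.field()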
diff --git a/src/ansys/dpf/core/operators/math/overall_dot.py b/src/ansys/dpf/core/operators/math/overall_dot.py
index 26b0bf78d8f..07237baa974 100644
--- a/src/ansys/dpf/core/operators/math/overall_dot.py
+++ b/src/ansys/dpf/core/operators/math/overall_dot.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class overall_dot(Operator):
- """Computes a dot product between two fields (fields are seen like a
- single large vector) and returns a scalar.
+ r"""Computes a dot product between two fields (fields are seen like a single
+ large vector) and returns a scalar.
+
Parameters
----------
- fieldA : Field
- fieldB : Field
+ fieldA: Field
+ fieldB: Field
Returns
-------
- field : Field
- Field defined on over-all location, contains
- a unique scalar value
+ field: Field
+ Field defined on over-all location, contains a unique scalar value
Examples
--------
@@ -59,9 +63,10 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Computes a dot product between two fields (fields are seen like a
- single large vector) and returns a scalar."""
+ def _spec() -> Specification:
+ description = r"""Computes a dot product between two fields (fields are seen like a single
+large vector) and returns a scalar.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +74,13 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fieldB",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,15 +88,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field defined on over-all location, contains
- a unique scalar value""",
+ document=r"""Field defined on over-all location, contains a unique scalar value""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -100,29 +104,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="native::overall_dot", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsOverallDot:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsOverallDot
+ inputs:
+ An instance of InputsOverallDot.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsOverallDot:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsOverallDot
+ outputs:
+ An instance of OutputsOverallDot.
"""
return super().outputs
@@ -149,12 +160,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -167,12 +179,13 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Parameters
- ----------
- my_fieldB : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,18 +216,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ Field defined on over-all location, contains a unique scalar value
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.overall_dot()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
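Since the two fields are flattened into single large vectors, the output is a one-entity field on the over-all location; a sketch with placeholder fields, assuming the usual Field.data accessor:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.overall_dot(fieldA=field_a, fieldB=field_b)
>>> dot_field = op.outputs.field()
>>> value = dot_field.data[0]  # the single scalar carried by the over-all field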
diff --git a/src/ansys/dpf/core/operators/math/phase.py b/src/ansys/dpf/core/operators/math/phase.py
index 94aba376fd9..bcacb7be1db 100644
--- a/src/ansys/dpf/core/operators/math/phase.py
+++ b/src/ansys/dpf/core/operators/math/phase.py
@@ -4,28 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class phase(Operator):
- """Computes the phase (in rad) between a real and an imaginary field.
+ r"""Computes the phase (in rad) between a real and an imaginary field.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -60,10 +63,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = (
- """Computes the phase (in rad) between a real and an imaginary field."""
- )
+ def _spec() -> Specification:
+ description = r"""Computes the phase (in rad) between a real and an imaginary field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,15 +73,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -87,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="phase", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPhase:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPhase
+ inputs:
+ An instance of InputsPhase.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPhase:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPhase
+ outputs:
+ An instance of OutputsPhase.
"""
return super().outputs
@@ -152,15 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +180,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.phase()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
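The two pins carry the real and imaginary parts respectively, and the result is expressed in radians; a sketch with placeholder fields:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.phase(fieldA=real_field, fieldB=imag_field)
>>> phase_rad = op.outputs.field()  # phase angle per entity, in radians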
diff --git a/src/ansys/dpf/core/operators/math/phase_fc.py b/src/ansys/dpf/core/operators/math/phase_fc.py
index 4aed3a71693..689f31be4d9 100644
--- a/src/ansys/dpf/core/operators/math/phase_fc.py
+++ b/src/ansys/dpf/core/operators/math/phase_fc.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class phase_fc(Operator):
- """Computes phase (in rad) between real and imaginary fields.
+ r"""Computes phase (in rad) between real and imaginary fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes phase (in rad) between real and imaginary fields."""
+ def _spec() -> Specification:
+ description = r"""Computes phase (in rad) between real and imaginary fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="phase_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPhaseFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPhaseFc
+ inputs:
+ An instance of InputsPhaseFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPhaseFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPhaseFc
+ outputs:
+ An instance of OutputsPhaseFc.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.phase_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/polar_to_cplx.py b/src/ansys/dpf/core/operators/math/polar_to_cplx.py
index d928b0f66fa..ee7f03fe75c 100644
--- a/src/ansys/dpf/core/operators/math/polar_to_cplx.py
+++ b/src/ansys/dpf/core/operators/math/polar_to_cplx.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class polar_to_cplx(Operator):
- """Converts a complex number from polar form to complex.
+ r"""Converts a complex number from polar form to complex.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Converts a complex number from polar form to complex."""
+ def _spec() -> Specification:
+ description = r"""Converts a complex number from polar form to complex.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="polar_to_cplx", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPolarToCplx:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPolarToCplx
+ inputs:
+ An instance of InputsPolarToCplx.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPolarToCplx:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPolarToCplx
+ outputs:
+ An instance of OutputsPolarToCplx.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.polar_to_cplx()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/pow.py b/src/ansys/dpf/core/operators/math/pow.py
index cbc22782e28..fc38c286516 100644
--- a/src/ansys/dpf/core/operators/math/pow.py
+++ b/src/ansys/dpf/core/operators/math/pow.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class pow(Operator):
- """Computes element-wise field[i]^p.
+ r"""Computes element-wise field[i]^p.
+
Parameters
----------
- field : Field
- factor : float
+ field: Field
+ factor: float
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, field=None, factor=None, config=None, server=None):
self.inputs.factor.connect(factor)
@staticmethod
- def _spec():
- description = """Computes element-wise field[i]^p."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise field[i]^p.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="factor",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="Pow", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPow:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPow
+ inputs:
+ An instance of InputsPow.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPow:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPow
+ outputs:
+ An instance of OutputsPow.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._factor)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def field(self):
return self._field
@property
- def factor(self):
- """Allows to connect factor input to the operator.
+ def factor(self) -> Input:
+ r"""Allows to connect factor input to the operator.
- Parameters
- ----------
- my_factor : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.pow()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
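The factor pin is a plain double, so element-wise squaring is simply field[i]^2.0; a sketch with a placeholder field:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.pow(field=my_field, factor=2.0)  # element-wise square
>>> squared = op.outputs.field()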
diff --git a/src/ansys/dpf/core/operators/math/pow_fc.py b/src/ansys/dpf/core/operators/math/pow_fc.py
index 2e3fd0486e2..a8b82078a1b 100644
--- a/src/ansys/dpf/core/operators/math/pow_fc.py
+++ b/src/ansys/dpf/core/operators/math/pow_fc.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class pow_fc(Operator):
- """Computes element-wise field[i]^p.
+ r"""Computes element-wise field[i]^p.
+
Parameters
----------
- fields_container : FieldsContainer
- factor : float
+ fields_container: FieldsContainer
+ factor: float
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, fields_container=None, factor=None, config=None, server=None)
self.inputs.factor.connect(factor)
@staticmethod
- def _spec():
- description = """Computes element-wise field[i]^p."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise field[i]^p.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="factor",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="Pow_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPowFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPowFc
+ inputs:
+ An instance of InputsPowFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPowFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPowFc
+ outputs:
+ An instance of OutputsPowFc.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._factor)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def fields_container(self):
return self._fields_container
@property
- def factor(self):
- """Allows to connect factor input to the operator.
+ def factor(self) -> Input:
+ r"""Allows to connect factor input to the operator.
- Parameters
- ----------
- my_factor : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.pow_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
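The new default_config signature above returns a Config that can be handed back to the constructor; a sketch showing only that round trip (option tuning on the Config is omitted, and `my_fc` is a placeholder container):
>>> from ansys.dpf import core as dpf
>>> cfg = dpf.operators.math.pow_fc.default_config()  # Config equivalent to the operator default
>>> op = dpf.operators.math.pow_fc(fields_container=my_fc, factor=3.0, config=cfg)
>>> cubed_fc = op.outputs.fields_container()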
diff --git a/src/ansys/dpf/core/operators/math/qr_solve.py b/src/ansys/dpf/core/operators/math/qr_solve.py
index ecdcb684425..a624f0a2112 100644
--- a/src/ansys/dpf/core/operators/math/qr_solve.py
+++ b/src/ansys/dpf/core/operators/math/qr_solve.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class qr_solve(Operator):
- """Computes the solution using QR factorization.
+ r"""Computes the solution using QR factorization.
+
Parameters
----------
- fields_container : FieldsContainer
- Fields_container
- rhs : FieldsContainer
- Fields_container
+ fields_container: FieldsContainer
+ fields_container
+ rhs: FieldsContainer
+ fields_container
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -58,8 +63,9 @@ def __init__(self, fields_container=None, rhs=None, config=None, server=None):
self.inputs.rhs.connect(rhs)
@staticmethod
- def _spec():
- description = """Computes the solution using QR factorization."""
+ def _spec() -> Specification:
+ description = r"""Computes the solution using QR factorization.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -67,13 +73,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
1: PinSpecification(
name="rhs",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
},
map_output_pin_spec={
@@ -81,14 +87,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -97,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="qrsolveOp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsQrSolve:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsQrSolve
+ inputs:
+ An instance of InputsQrSolve.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsQrSolve:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsQrSolve
+ outputs:
+ An instance of OutputsQrSolve.
"""
return super().outputs
@@ -146,14 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._rhs)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,14 +180,15 @@ def fields_container(self):
return self._fields_container
@property
- def rhs(self):
- """Allows to connect rhs input to the operator.
+ def rhs(self) -> Input:
+ r"""Allows to connect rhs input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_rhs : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -204,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.qr_solve()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
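Both the system and its right-hand side are passed as fields containers; a sketch with placeholder containers `system_fc` and `rhs_fc`:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.qr_solve(fields_container=system_fc, rhs=rhs_fc)
>>> solution_fc = op.outputs.fields_container()  # solution obtained through QR factorization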
diff --git a/src/ansys/dpf/core/operators/math/real_part.py b/src/ansys/dpf/core/operators/math/real_part.py
index fa068400cbc..efe1aaaad0c 100644
--- a/src/ansys/dpf/core/operators/math/real_part.py
+++ b/src/ansys/dpf/core/operators/math/real_part.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class real_part(Operator):
- """Extracts element-wise real part of field containers containing complex
+ r"""Extracts element-wise real part of field containers containing complex
fields.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -51,9 +56,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Extracts element-wise real part of field containers containing complex
- fields."""
+ def _spec() -> Specification:
+ description = r"""Extracts element-wise real part of field containers containing complex
+fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +67,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +75,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="realP_part", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRealPart:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRealPart
+ inputs:
+ An instance of InputsRealPart.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRealPart:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRealPart
+ outputs:
+ An instance of OutputsRealPart.
"""
return super().outputs
@@ -130,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +180,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.real_part()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/relative_error.py b/src/ansys/dpf/core/operators/math/relative_error.py
index 8da912e00c9..f1b5c0e7d91 100644
--- a/src/ansys/dpf/core/operators/math/relative_error.py
+++ b/src/ansys/dpf/core/operators/math/relative_error.py
@@ -4,35 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class relative_error(Operator):
- """Computes the relative error between a reference scalar field and
- another scalar field.
+ r"""Computes the relative error between a reference scalar field and another
+ scalar field.
+
Parameters
----------
- value : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
- reference : Field or FieldsContainer or float
- Field or fields container with only one field
- is expected
+ value: Field or FieldsContainer or float
+ field or fields container with only one field is expected
+ reference: Field or FieldsContainer or float
+ field or fields container with only one field is expected
Returns
-------
- field : Field
- zero_ref_scoping : Scoping
- Ids of entities where reference value is
- zero.
- no_ref_scoping : Scoping
- Ids of entities where there are no reference
- value.
+ field: Field
+ zero_ref_scoping: Scoping
+ Ids of entities where reference value is zero.
+ no_ref_scoping: Scoping
+ Ids of entities where there is no reference value.
Examples
--------
@@ -69,9 +70,10 @@ def __init__(self, value=None, reference=None, config=None, server=None):
self.inputs.reference.connect(reference)
@staticmethod
- def _spec():
- description = """Computes the relative error between a reference scalar field and
- another scalar field."""
+ def _spec() -> Specification:
+ description = r"""Computes the relative error between a reference scalar field and another
+scalar field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -84,8 +86,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="reference",
@@ -96,8 +97,7 @@ def _spec():
"vector",
],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -105,28 +105,26 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="zero_ref_scoping",
type_names=["scoping"],
optional=False,
- document="""Ids of entities where reference value is
- zero.""",
+ document=r"""Ids of entities where reference value is zero.""",
),
2: PinSpecification(
name="no_ref_scoping",
type_names=["scoping"],
optional=False,
- document="""Ids of entities where there are no reference
- value.""",
+ document=r"""Ids of entities where there are no reference value.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -135,29 +133,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="relative_error", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRelativeError:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRelativeError
+ inputs:
+ An instance of InputsRelativeError.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRelativeError:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRelativeError
+ outputs:
+ An instance of OutputsRelativeError.
"""
return super().outputs
@@ -184,15 +189,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._reference)
@property
- def value(self):
- """Allows to connect value input to the operator.
+ def value(self) -> Input:
+ r"""Allows to connect value input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_value : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,15 +210,15 @@ def value(self):
return self._value
@property
- def reference(self):
- """Allows to connect reference input to the operator.
+ def reference(self) -> Input:
+ r"""Allows to connect reference input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_reference : Field or FieldsContainer or float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -250,52 +255,59 @@ def __init__(self, op: Operator):
self._outputs.append(self._no_ref_scoping)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.relative_error()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def zero_ref_scoping(self):
- """Allows to get zero_ref_scoping output of the operator
+ def zero_ref_scoping(self) -> Output:
+ r"""Allows to get zero_ref_scoping output of the operator
+
+ Ids of entities where the reference value is zero.
Returns
- ----------
- my_zero_ref_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.relative_error()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_zero_ref_scoping = op.outputs.zero_ref_scoping()
- """ # noqa: E501
+ """
return self._zero_ref_scoping
@property
- def no_ref_scoping(self):
- """Allows to get no_ref_scoping output of the operator
+ def no_ref_scoping(self) -> Output:
+ r"""Allows to get no_ref_scoping output of the operator
+
+ Ids of entities where there is no reference value.
Returns
- ----------
- my_no_ref_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.relative_error()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_no_ref_scoping = op.outputs.no_ref_scoping()
- """ # noqa: E501
+ """
return self._no_ref_scoping
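
A minimal usage sketch for the regenerated relative_error operator, assuming a running DPF server and following the empty-field placeholder style of the generated doctests (real fields would carry data):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.relative_error()
>>> my_value = dpf.Field()        # field to check
>>> my_reference = dpf.Field()    # reference field
>>> op.inputs.value.connect(my_value)
>>> op.inputs.reference.connect(my_reference)
>>> # The three output pins documented above
>>> result_field = op.outputs.field()
>>> zero_ref_ids = op.outputs.zero_ref_scoping()   # ids where the reference value is zero
>>> no_ref_ids = op.outputs.no_ref_scoping()       # ids with no reference value
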
diff --git a/src/ansys/dpf/core/operators/math/scale.py b/src/ansys/dpf/core/operators/math/scale.py
index a317ae31f89..02f56b9defb 100644
--- a/src/ansys/dpf/core/operators/math/scale.py
+++ b/src/ansys/dpf/core/operators/math/scale.py
@@ -4,39 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scale(Operator):
- """Scales a field by a constant factor. This factor can be a scalar or a
+ r"""Scales a field by a constant factor. This factor can be a scalar or a
vector, where each value of the vector represents a scalar per
- component. Number of the components are corresponding to the input
- field dimensionality
+ component. The number of components corresponds to the input field
+ dimensionality
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- ponderation : float or Field
- Double/field/vector of doubles. when scoped
- on overall, same value(s) applied on
- all the data, when scoped elsewhere,
- corresponding values will be
- multiplied due to the scoping
- boolean : bool, optional
- Default is false. if set to true, output of
- scale is made dimensionless
- algorithm : int, optional
- Default is 0 use mkl. if set to 1, don't
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ weights: float or Field
+ Double/Field/Vector of doubles. When scoped on overall, the same value(s) are applied to all the data; when scoped elsewhere, the corresponding values are multiplied according to the scoping
+ boolean: bool, optional
+ Default is false. If set to true, output of scale is made dimensionless
+ algorithm: int, optional
+ Default is 0 (use MKL); if set to 1, MKL is not used
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -48,8 +47,8 @@ class scale(Operator):
>>> # Make input connections
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_boolean = bool()
>>> op.inputs.boolean.connect(my_boolean)
>>> my_algorithm = int()
@@ -58,7 +57,7 @@ class scale(Operator):
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.scale(
... field=my_field,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... boolean=my_boolean,
... algorithm=my_algorithm,
... )
@@ -70,30 +69,34 @@ class scale(Operator):
def __init__(
self,
field=None,
- ponderation=None,
+ weights=None,
boolean=None,
algorithm=None,
config=None,
server=None,
+ ponderation=None,
):
super().__init__(name="scale", config=config, server=server)
self._inputs = InputsScale(self)
self._outputs = OutputsScale(self)
if field is not None:
self.inputs.field.connect(field)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
if boolean is not None:
self.inputs.boolean.connect(boolean)
if algorithm is not None:
self.inputs.algorithm.connect(algorithm)
@staticmethod
- def _spec():
- description = """Scales a field by a constant factor. This factor can be a scalar or a
- vector, where each value of the vector represents a scaler
- per component. Number of the components are corresponding
- to the input field dimensionality"""
+ def _spec() -> Specification:
+ description = r"""Scales a field by a constant factor. This factor can be a scalar or a
+vector, where each value of the vector represents a scalar per
+component. The number of components corresponds to the input field
+dimensionality
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -101,31 +104,26 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=["double", "field", "vector"],
optional=False,
- document="""Double/field/vector of doubles. when scoped
- on overall, same value(s) applied on
- all the data, when scoped elsewhere,
- corresponding values will be
- multiplied due to the scoping""",
+ document=r"""Double/Field/Vector of doubles. When scoped on overall, same value(s) applied on all the data, when scoped elsewhere, corresponding values will be multiplied due to the scoping""",
+ aliases=["ponderation"],
),
2: PinSpecification(
name="boolean",
type_names=["bool"],
optional=True,
- document="""Default is false. if set to true, output of
- scale is made dimensionless""",
+ document=r"""Default is false. If set to true, output of scale is made dimensionless""",
),
3: PinSpecification(
name="algorithm",
type_names=["int32"],
optional=True,
- document="""Default is 0 use mkl. if set to 1, don't""",
+ document=r"""Default is 0 use mkl. If set to 1, don't""",
),
},
map_output_pin_spec={
@@ -133,14 +131,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -149,29 +147,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="scale", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScale:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScale
+ inputs:
+ An instance of InputsScale.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScale:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScale
+ outputs:
+ An instance of OutputsScale.
"""
return super().outputs
@@ -186,8 +191,8 @@ class InputsScale(_Inputs):
>>> op = dpf.operators.math.scale()
>>> my_field = dpf.Field()
>>> op.inputs.field.connect(my_field)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_boolean = bool()
>>> op.inputs.boolean.connect(my_boolean)
>>> my_algorithm = int()
@@ -198,23 +203,23 @@ def __init__(self, op: Operator):
super().__init__(scale._spec().inputs, op)
self._field = Input(scale._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._field)
- self._ponderation = Input(scale._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(scale._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
self._boolean = Input(scale._spec().input_pin(2), 2, op, -1)
self._inputs.append(self._boolean)
self._algorithm = Input(scale._spec().input_pin(3), 3, op, -1)
self._inputs.append(self._algorithm)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,39 +232,36 @@ def field(self):
return self._field
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Double/field/vector of doubles. when scoped
- on overall, same value(s) applied on
- all the data, when scoped elsewhere,
- corresponding values will be
- multiplied due to the scoping
+ Double/Field/Vector of doubles. When scoped on overall, the same value(s) are applied to all the data; when scoped elsewhere, the corresponding values are multiplied according to the scoping
- Parameters
- ----------
- my_ponderation : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
@property
- def boolean(self):
- """Allows to connect boolean input to the operator.
+ def boolean(self) -> Input:
+ r"""Allows to connect boolean input to the operator.
- Default is false. if set to true, output of
- scale is made dimensionless
+ Default is false. If set to true, output of scale is made dimensionless
- Parameters
- ----------
- my_boolean : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -272,14 +274,15 @@ def boolean(self):
return self._boolean
@property
- def algorithm(self):
- """Allows to connect algorithm input to the operator.
+ def algorithm(self) -> Input:
+ r"""Allows to connect algorithm input to the operator.
- Default is 0 use mkl. if set to 1, don't
+ Default is 0 (use MKL); if set to 1, MKL is not used
- Parameters
- ----------
- my_algorithm : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -291,6 +294,18 @@ def algorithm(self):
"""
return self._algorithm
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator scale: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
+
class OutputsScale(_Outputs):
"""Intermediate class used to get outputs from
@@ -310,18 +325,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
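
A sketch of the scale pin rename, assuming a running DPF server: the new weights name is used directly, while the deprecated ponderation name still resolves through InputsScale.__getattr__, emits a DeprecationWarning, and returns the weights Input:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()
>>> op = dpf.operators.math.scale(field=my_field, weights=2.0)   # new keyword
>>> legacy_pin = op.inputs.ponderation   # deprecated name; warns and returns the weights Input
>>> result_field = op.outputs.field()
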
diff --git a/src/ansys/dpf/core/operators/math/scale_by_field.py b/src/ansys/dpf/core/operators/math/scale_by_field.py
index d3ee6e3c24b..2302291a7cf 100644
--- a/src/ansys/dpf/core/operators/math/scale_by_field.py
+++ b/src/ansys/dpf/core/operators/math/scale_by_field.py
@@ -4,30 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scale_by_field(Operator):
- """DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
- (in 1). If one field's scoping has an 'overall' location, then
- this field's values are applied on the other field entirely.
+ r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
+ (in 1). If one field’s scoping has an ‘overall’ location, then this
+ field’s values are applied on the other field entirely.
+
Parameters
----------
- fieldA : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- fieldB : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ fieldA: Field or FieldsContainer
+ field or fields container with only one field is expected
+ fieldB: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -62,11 +65,11 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
- (in 1). If one field's scoping has an 'overall' location,
- then this field's values are applied on the other field
- entirely."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
+(in 1). If one field’s scoping has an ‘overall’ location, then this
+field’s values are applied on the other field entirely.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -74,15 +77,13 @@ def _spec():
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -90,14 +91,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -106,29 +107,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="scale_by_field", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScaleByField:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScaleByField
+ inputs:
+ An instance of InputsScaleByField.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScaleByField:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScaleByField
+ outputs:
+ An instance of OutputsScaleByField.
"""
return super().outputs
@@ -155,15 +163,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldA : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,15 +184,15 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fieldB : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +223,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/math/scale_by_field_fc.py b/src/ansys/dpf/core/operators/math/scale_by_field_fc.py
index 2d1b90f965f..53d0034152f 100644
--- a/src/ansys/dpf/core/operators/math/scale_by_field_fc.py
+++ b/src/ansys/dpf/core/operators/math/scale_by_field_fc.py
@@ -4,30 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scale_by_field_fc(Operator):
- """DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
- (in 1). If one field's scoping has an 'overall' location, then
- this field's values are applied on the other field entirely.
+ r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
+ (in 1). If one field’s scoping has an ‘overall’ location, then this
+ field’s values are applied on the other field entirely.
+
Parameters
----------
- field_or_fields_container_A : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- field_or_fields_container_B : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field_or_fields_container_A: Field or FieldsContainer
+ field or fields container with only one field is expected
+ field_or_fields_container_B: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -68,11 +71,11 @@ def __init__(
self.inputs.field_or_fields_container_B.connect(field_or_fields_container_B)
@staticmethod
- def _spec():
- description = """DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
- (in 1). If one field's scoping has an 'overall' location,
- then this field's values are applied on the other field
- entirely."""
+ def _spec() -> Specification:
+ description = r"""DEPRECATED, PLEASE USE SCALE. Scales a field (in 0) by a scalar field
+(in 1). If one field’s scoping has an ‘overall’ location, then this
+field’s values are applied on the other field entirely.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,15 +83,13 @@ def _spec():
name="field_or_fields_container_A",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="field_or_fields_container_B",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -96,14 +97,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +113,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="scale_by_field_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScaleByFieldFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScaleByFieldFc
+ inputs:
+ An instance of InputsScaleByFieldFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScaleByFieldFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScaleByFieldFc
+ outputs:
+ An instance of OutputsScaleByFieldFc.
"""
return super().outputs
@@ -165,15 +173,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_or_fields_container_B)
@property
- def field_or_fields_container_A(self):
- """Allows to connect field_or_fields_container_A input to the operator.
+ def field_or_fields_container_A(self) -> Input:
+ r"""Allows to connect field_or_fields_container_A input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_A : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -186,15 +194,15 @@ def field_or_fields_container_A(self):
return self._field_or_fields_container_A
@property
- def field_or_fields_container_B(self):
- """Allows to connect field_or_fields_container_B input to the operator.
+ def field_or_fields_container_B(self) -> Input:
+ r"""Allows to connect field_or_fields_container_B input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field_or_fields_container_B : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,18 +233,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/scale_fc.py b/src/ansys/dpf/core/operators/math/scale_fc.py
index c23a02cfd7b..314e7d96982 100644
--- a/src/ansys/dpf/core/operators/math/scale_fc.py
+++ b/src/ansys/dpf/core/operators/math/scale_fc.py
@@ -4,39 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class scale_fc(Operator):
- """Scales a fields container by a constant factor. This factor can be a
- scalar, a vector a field, a fields container, where each value of
- the vector represents a scaler per component. Number of the
- components are corresponding to the input field dimensionality
+ r"""Scales a fields container by a constant factor. This factor can be a
+ scalar, a vector, a field, or a fields container, where each value of the
+ vector represents a scalar per component. The number of components
+ corresponds to the input field dimensionality
+
Parameters
----------
- fields_container : FieldsContainer
- Fields container to be scaled
- ponderation : float or Field or FieldsContainer
- Double/vector of
- doubles/field/fieldscontainer. when
- scoped on overall, same value(s)
- applied on all the data, when scoped
- elsewhere, corresponding values will
- be multiplied due to the scoping
- boolean : bool, optional
- Default is false. if set to true, output of
- scale is made dimensionless
- algorithm : int, optional
- Default is 0 use mkl. if set to 1, don't
+ fields_container: FieldsContainer
+ fields container to be scaled
+ weights: float or Field or FieldsContainer
+ Double/Vector of doubles/Field/FieldsContainer. When scoped on overall, the same value(s) are applied to all the data; when scoped elsewhere, the corresponding values are multiplied according to the scoping
+ boolean: bool, optional
+ Default is false. If set to true, output of scale is made dimensionless
+ algorithm: int, optional
+ Default is 0 (use MKL); if set to 1, MKL is not used
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -48,8 +47,8 @@ class scale_fc(Operator):
>>> # Make input connections
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_boolean = bool()
>>> op.inputs.boolean.connect(my_boolean)
>>> my_algorithm = int()
@@ -58,7 +57,7 @@ class scale_fc(Operator):
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.scale_fc(
... fields_container=my_fields_container,
- ... ponderation=my_ponderation,
+ ... weights=my_weights,
... boolean=my_boolean,
... algorithm=my_algorithm,
... )
@@ -70,31 +69,34 @@ class scale_fc(Operator):
def __init__(
self,
fields_container=None,
- ponderation=None,
+ weights=None,
boolean=None,
algorithm=None,
config=None,
server=None,
+ ponderation=None,
):
super().__init__(name="scale_fc", config=config, server=server)
self._inputs = InputsScaleFc(self)
self._outputs = OutputsScaleFc(self)
if fields_container is not None:
self.inputs.fields_container.connect(fields_container)
- if ponderation is not None:
- self.inputs.ponderation.connect(ponderation)
+ if weights is not None:
+ self.inputs.weights.connect(weights)
+ elif ponderation is not None:
+ self.inputs.weights.connect(ponderation)
if boolean is not None:
self.inputs.boolean.connect(boolean)
if algorithm is not None:
self.inputs.algorithm.connect(algorithm)
@staticmethod
- def _spec():
- description = """Scales a fields container by a constant factor. This factor can be a
- scalar, a vector a field, a fields container, where each
- value of the vector represents a scaler per component.
- Number of the components are corresponding to the input
- field dimensionality"""
+ def _spec() -> Specification:
+ description = r"""Scales a fields container by a constant factor. This factor can be a
+scalar, a vector, a field, or a fields container, where each value of the
+vector represents a scalar per component. The number of components
+corresponds to the input field dimensionality
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -102,10 +104,10 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields container to be scaled""",
+ document=r"""fields container to be scaled""",
),
1: PinSpecification(
- name="ponderation",
+ name="weights",
type_names=[
"double",
"field",
@@ -113,25 +115,20 @@ def _spec():
"fields_container",
],
optional=False,
- document="""Double/vector of
- doubles/field/fieldscontainer. when
- scoped on overall, same value(s)
- applied on all the data, when scoped
- elsewhere, corresponding values will
- be multiplied due to the scoping""",
+ document=r"""Double/Vector of doubles/Field/FieldsContainer. When scoped on overall, same value(s) applied on all the data, when scoped elsewhere, corresponding values will be multiplied due to the scoping""",
+ aliases=["ponderation"],
),
2: PinSpecification(
name="boolean",
type_names=["bool"],
optional=True,
- document="""Default is false. if set to true, output of
- scale is made dimensionless""",
+ document=r"""Default is false. If set to true, output of scale is made dimensionless""",
),
3: PinSpecification(
name="algorithm",
type_names=["int32"],
optional=True,
- document="""Default is 0 use mkl. if set to 1, don't""",
+ document=r"""Default is 0 use mkl. If set to 1, don't""",
),
},
map_output_pin_spec={
@@ -139,14 +136,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -155,29 +152,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="scale_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsScaleFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsScaleFc
+ inputs:
+ An instance of InputsScaleFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsScaleFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsScaleFc
+ outputs:
+ An instance of OutputsScaleFc.
"""
return super().outputs
@@ -192,8 +196,8 @@ class InputsScaleFc(_Inputs):
>>> op = dpf.operators.math.scale_fc()
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
- >>> my_ponderation = float()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> my_weights = float()
+ >>> op.inputs.weights.connect(my_weights)
>>> my_boolean = bool()
>>> op.inputs.boolean.connect(my_boolean)
>>> my_algorithm = int()
@@ -204,22 +208,23 @@ def __init__(self, op: Operator):
super().__init__(scale_fc._spec().inputs, op)
self._fields_container = Input(scale_fc._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fields_container)
- self._ponderation = Input(scale_fc._spec().input_pin(1), 1, op, -1)
- self._inputs.append(self._ponderation)
+ self._weights = Input(scale_fc._spec().input_pin(1), 1, op, -1)
+ self._inputs.append(self._weights)
self._boolean = Input(scale_fc._spec().input_pin(2), 2, op, -1)
self._inputs.append(self._boolean)
self._algorithm = Input(scale_fc._spec().input_pin(3), 3, op, -1)
self._inputs.append(self._algorithm)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container to be scaled
+ fields container to be scaled
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -232,40 +237,36 @@ def fields_container(self):
return self._fields_container
@property
- def ponderation(self):
- """Allows to connect ponderation input to the operator.
+ def weights(self) -> Input:
+ r"""Allows to connect weights input to the operator.
- Double/vector of
- doubles/field/fieldscontainer. when
- scoped on overall, same value(s)
- applied on all the data, when scoped
- elsewhere, corresponding values will
- be multiplied due to the scoping
+ Double/Vector of doubles/Field/FieldsContainer. When scoped on overall, the same value(s) are applied to all the data; when scoped elsewhere, the corresponding values are multiplied according to the scoping
- Parameters
- ----------
- my_ponderation : float or Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_fc()
- >>> op.inputs.ponderation.connect(my_ponderation)
+ >>> op.inputs.weights.connect(my_weights)
>>> # or
- >>> op.inputs.ponderation(my_ponderation)
+ >>> op.inputs.weights(my_weights)
"""
- return self._ponderation
+ return self._weights
@property
- def boolean(self):
- """Allows to connect boolean input to the operator.
+ def boolean(self) -> Input:
+ r"""Allows to connect boolean input to the operator.
- Default is false. if set to true, output of
- scale is made dimensionless
+ Default is false. If set to true, output of scale is made dimensionless
- Parameters
- ----------
- my_boolean : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -278,14 +279,15 @@ def boolean(self):
return self._boolean
@property
- def algorithm(self):
- """Allows to connect algorithm input to the operator.
+ def algorithm(self) -> Input:
+ r"""Allows to connect algorithm input to the operator.
- Default is 0 use mkl. if set to 1, don't
+ Default is 0 (use MKL); if set to 1, MKL is not used
- Parameters
- ----------
- my_algorithm : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -297,6 +299,18 @@ def algorithm(self):
"""
return self._algorithm
+ def __getattr__(self, name):
+ if name in ["ponderation"]:
+ warn(
+ DeprecationWarning(
+ f'Operator scale_fc: Input name "{name}" is deprecated in favor of "weights".'
+ )
+ )
+ return self.weights
+ raise AttributeError(
+ f"'{self.__class__.__name__}' object has no attribute '{name}'."
+ )
+
class OutputsScaleFc(_Outputs):
"""Intermediate class used to get outputs from
@@ -316,18 +330,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
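
The fields-container variant follows the same rename; a sketch, assuming a running DPF server and a populated FieldsContainer, showing that the constructor still accepts the old ponderation keyword and silently routes it to weights:

>>> from ansys.dpf import core as dpf
>>> my_fields_container = dpf.FieldsContainer()
>>> op_new = dpf.operators.math.scale_fc(fields_container=my_fields_container, weights=0.5)
>>> op_old = dpf.operators.math.scale_fc(fields_container=my_fields_container, ponderation=0.5)  # routed to weights
>>> result = op_new.outputs.fields_container()
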
diff --git a/src/ansys/dpf/core/operators/math/sin.py b/src/ansys/dpf/core/operators/math/sin.py
index a37f49b25a9..7dc10862b01 100644
--- a/src/ansys/dpf/core/operators/math/sin.py
+++ b/src/ansys/dpf/core/operators/math/sin.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sin(Operator):
- """Computes element-wise sin(field[i]).
+ r"""Computes element-wise sin(field[i]).
+
Parameters
----------
- field : Field
+ field: Field
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise sin(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise sin(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sin", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSin:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSin
+ inputs:
+ An instance of InputsSin.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSin:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSin
+ outputs:
+ An instance of OutputsSin.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
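
sin, sin_fc, sqr, sqr_fc, sqrt, and sqrt_fc in this patch all keep the same single-pin, element-wise pattern; one sketch covers them, assuming a running DPF server and a field that actually carries data:

>>> from ansys.dpf import core as dpf
>>> my_field = dpf.Field()
>>> op = dpf.operators.math.sin(field=my_field)
>>> result_field = op.outputs.field()   # element-wise sin(field[i])
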
diff --git a/src/ansys/dpf/core/operators/math/sin_fc.py b/src/ansys/dpf/core/operators/math/sin_fc.py
index 04489a2c8bb..84002fd5df5 100644
--- a/src/ansys/dpf/core/operators/math/sin_fc.py
+++ b/src/ansys/dpf/core/operators/math/sin_fc.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sin_fc(Operator):
- """Computes element-wise sin(field[i]).
+ r"""Computes element-wise sin(field[i]).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise sin(field[i])."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise sin(field[i]).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sin_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSinFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSinFc
+ inputs:
+ An instance of InputsSinFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSinFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSinFc
+ outputs:
+ An instance of OutputsSinFc.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sin_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/sqr.py b/src/ansys/dpf/core/operators/math/sqr.py
index 73be26baef4..754e26a4af3 100644
--- a/src/ansys/dpf/core/operators/math/sqr.py
+++ b/src/ansys/dpf/core/operators/math/sqr.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sqr(Operator):
- """Computes element-wise field[i]^2.
+ r"""Computes element-wise field[i]^2.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise field[i]^2."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise field[i]^2.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sqr", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSqr:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSqr
+ inputs:
+ An instance of InputsSqr.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSqr:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSqr
+ outputs:
+ An instance of OutputsSqr.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sqr()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
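
The signature modernization (AnyServerType for server, explicit Config return type) does not change how configs are used; a sketch assuming a running DPF server:

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.math.sqr.default_config()   # Config instance for this operator
>>> op = dpf.operators.math.sqr(config=config)
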
diff --git a/src/ansys/dpf/core/operators/math/sqr_fc.py b/src/ansys/dpf/core/operators/math/sqr_fc.py
index 21be326fc41..724860e7096 100644
--- a/src/ansys/dpf/core/operators/math/sqr_fc.py
+++ b/src/ansys/dpf/core/operators/math/sqr_fc.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sqr_fc(Operator):
- """Computes element-wise field[i]^2.
+ r"""Computes element-wise field[i]^2.
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise field[i]^2."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise field[i]^2.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sqr_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSqrFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSqrFc
+ inputs:
+ An instance of InputsSqrFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSqrFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSqrFc
+ outputs:
+ An instance of OutputsSqrFc.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sqr_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/sqrt.py b/src/ansys/dpf/core/operators/math/sqrt.py
index 19edff852d9..1948c9bffca 100644
--- a/src/ansys/dpf/core/operators/math/sqrt.py
+++ b/src/ansys/dpf/core/operators/math/sqrt.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sqrt(Operator):
- """Computes element-wise sqrt(field1).
+ r"""Computes element-wise sqrt(field1).
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Computes element-wise sqrt(field1)."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise sqrt(field1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sqrt", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSqrt:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSqrt
+ inputs:
+ An instance of InputsSqrt.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSqrt:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSqrt
+ outputs:
+ An instance of OutputsSqrt.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sqrt()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/math/sqrt_fc.py b/src/ansys/dpf/core/operators/math/sqrt_fc.py
index 1e5b20162f4..3d53b132b36 100644
--- a/src/ansys/dpf/core/operators/math/sqrt_fc.py
+++ b/src/ansys/dpf/core/operators/math/sqrt_fc.py
@@ -4,25 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sqrt_fc(Operator):
- """Computes element-wise sqrt(field1).
+ r"""Computes element-wise sqrt(field1).
+
Parameters
----------
- fields_container : FieldsContainer
- Field or fields container with only one field
- is expected
+ fields_container: FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,8 +56,9 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes element-wise sqrt(field1)."""
+ def _spec() -> Specification:
+ description = r"""Computes element-wise sqrt(field1).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,8 +66,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -70,14 +74,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -86,29 +90,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sqrt_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSqrtFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSqrtFc
+ inputs:
+ An instance of InputsSqrtFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSqrtFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSqrtFc
+ outputs:
+ An instance of OutputsSqrtFc.
"""
return super().outputs
@@ -131,15 +142,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,18 +181,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sqrt_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
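A minimal sketch for the fields-container variant, assuming my_fields_container already exists (placeholder); the constructor keyword mirrors the __init__ signature shown above:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sqrt_fc(fields_container=my_fields_container)  # placeholder input
>>> result_fields_container = op.outputs.fields_container()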
diff --git a/src/ansys/dpf/core/operators/math/svd.py b/src/ansys/dpf/core/operators/math/svd.py
index ef8e3805c45..96576bd1ecb 100644
--- a/src/ansys/dpf/core/operators/math/svd.py
+++ b/src/ansys/dpf/core/operators/math/svd.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class svd(Operator):
- """Computes the matrix singular value decomposition (SVD) for each field
- in the given fields container.
+ r"""Computes the matrix singular value decomposition (SVD) for each field in
+ the given fields container.
+
Parameters
----------
- fields_container : FieldsContainer
- Fields_container
+ fields_container: FieldsContainer
+ fields_container
Returns
-------
- s_svd : FieldsContainer
- Singular values of the input data, where
- a=u.s.vt
- u_svd : FieldsContainer
- U of the input data, where a=u.s.vt
- vt_svd : FieldsContainer
- Vt of the input data, where a=u.s.vt
+ s_svd: FieldsContainer
+ Singular values of the input data, where A=U.S.Vt
+ u_svd: FieldsContainer
+ U of the input data, where A=U.S.Vt
+ vt_svd: FieldsContainer
+ Vt of the input data, where A=U.S.Vt
Examples
--------
@@ -60,9 +64,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Computes the matrix singular value decomposition (SVD) for each field
- in the given fields container."""
+ def _spec() -> Specification:
+ description = r"""Computes the matrix singular value decomposition (SVD) for each field in
+the given fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,7 +75,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields_container""",
+ document=r"""fields_container""",
),
},
map_output_pin_spec={
@@ -78,27 +83,26 @@ def _spec():
name="s_svd",
type_names=["fields_container"],
optional=False,
- document="""Singular values of the input data, where
- a=u.s.vt""",
+ document=r"""Singular values of the input data, where A=U.S.Vt""",
),
1: PinSpecification(
name="u_svd",
type_names=["fields_container"],
optional=False,
- document="""U of the input data, where a=u.s.vt""",
+ document=r"""U of the input data, where A=U.S.Vt""",
),
2: PinSpecification(
name="vt_svd",
type_names=["fields_container"],
optional=False,
- document="""Vt of the input data, where a=u.s.vt""",
+ document=r"""Vt of the input data, where A=U.S.Vt""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -107,29 +111,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="svdOp", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSvd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSvd
+ inputs:
+ An instance of InputsSvd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSvd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSvd
+ outputs:
+ An instance of OutputsSvd.
"""
return super().outputs
@@ -152,14 +163,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields_container
+ fields_container
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -196,52 +208,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._vt_svd)
@property
- def s_svd(self):
- """Allows to get s_svd output of the operator
+ def s_svd(self) -> Output:
+ r"""Allows to get s_svd output of the operator
+
+ Singular values of the input data, where A=U.S.Vt
Returns
- ----------
- my_s_svd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_s_svd = op.outputs.s_svd()
- """ # noqa: E501
+ """
return self._s_svd
@property
- def u_svd(self):
- """Allows to get u_svd output of the operator
+ def u_svd(self) -> Output:
+ r"""Allows to get u_svd output of the operator
+
+ U of the input data, where A=U.S.Vt
Returns
- ----------
- my_u_svd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_u_svd = op.outputs.u_svd()
- """ # noqa: E501
+ """
return self._u_svd
@property
- def vt_svd(self):
- """Allows to get vt_svd output of the operator
+ def vt_svd(self) -> Output:
+ r"""Allows to get vt_svd output of the operator
+
+ Vt of the input data, where A=U.S.Vt
Returns
- ----------
- my_vt_svd : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_vt_svd = op.outputs.vt_svd()
- """ # noqa: E501
+ """
return self._vt_svd
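A minimal sketch showing how the three SVD outputs are retrieved, assuming my_fields_container is an existing FieldsContainer (placeholder):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> op.inputs.fields_container.connect(my_fields_container)  # placeholder input
>>> s = op.outputs.s_svd()    # singular values S, where A = U.S.Vt
>>> u = op.outputs.u_svd()    # U factor
>>> vt = op.outputs.vt_svd()  # Vt factor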
diff --git a/src/ansys/dpf/core/operators/math/sweeping_phase.py b/src/ansys/dpf/core/operators/math/sweeping_phase.py
index 131a58af4ff..6127d70f642 100644
--- a/src/ansys/dpf/core/operators/math/sweeping_phase.py
+++ b/src/ansys/dpf/core/operators/math/sweeping_phase.py
@@ -4,39 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sweeping_phase(Operator):
- """Shifts the phase of a real and an imaginary field (in 0 and 1) of a
+ r"""Shifts the phase of a real and an imaginary field (in 0 and 1) of a
given angle (in 3) of a unit (in 4).
+
Parameters
----------
- real_field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- imaginary_field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- angle : float
- unit_name : str, optional
- String unit. supported values: "deg" or
- "rad". default: "rad".
- abs_value : bool
- imaginary_part_null : bool
- If the imaginary part field is empty and this
- pin is true, then the imaginary part
- is supposed to be 0 (default is
- false).
+ real_field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ imaginary_field: Field or FieldsContainer
+ field or fields container with only one field is expected
+ angle: float
+ unit_name: str, optional
+ String Unit. Supported values: "deg" or "rad". Default: "rad".
+ abs_value: bool
+ imaginary_part_null: bool
+ If the imaginary part field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -101,9 +100,10 @@ def __init__(
self.inputs.imaginary_part_null.connect(imaginary_part_null)
@staticmethod
- def _spec():
- description = """Shifts the phase of a real and an imaginary field (in 0 and 1) of a
- given angle (in 3) of a unit (in 4)."""
+ def _spec() -> Specification:
+ description = r"""Shifts the phase of a real and an imaginary field (in 0 and 1) of a
+given angle (in 3) of a unit (in 4).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -111,43 +111,37 @@ def _spec():
name="real_field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
1: PinSpecification(
name="imaginary_field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
2: PinSpecification(
name="angle",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="unit_name",
type_names=["string"],
optional=True,
- document="""String unit. supported values: "deg" or
- "rad". default: "rad".""",
+ document=r"""String Unit. Supported values: "deg" or "rad". Default: "rad".""",
),
4: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="imaginary_part_null",
type_names=["bool"],
optional=False,
- document="""If the imaginary part field is empty and this
- pin is true, then the imaginary part
- is supposed to be 0 (default is
- false).""",
+ document=r"""If the imaginary part field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false).""",
),
},
map_output_pin_spec={
@@ -155,14 +149,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -171,29 +165,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sweeping_phase", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSweepingPhase:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSweepingPhase
+ inputs:
+ An instance of InputsSweepingPhase.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSweepingPhase:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSweepingPhase
+ outputs:
+ An instance of OutputsSweepingPhase.
"""
return super().outputs
@@ -238,15 +239,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._imaginary_part_null)
@property
- def real_field(self):
- """Allows to connect real_field input to the operator.
+ def real_field(self) -> Input:
+ r"""Allows to connect real_field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_real_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -259,15 +260,15 @@ def real_field(self):
return self._real_field
@property
- def imaginary_field(self):
- """Allows to connect imaginary_field input to the operator.
+ def imaginary_field(self) -> Input:
+ r"""Allows to connect imaginary_field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_imaginary_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -280,12 +281,13 @@ def imaginary_field(self):
return self._imaginary_field
@property
- def angle(self):
- """Allows to connect angle input to the operator.
+ def angle(self) -> Input:
+ r"""Allows to connect angle input to the operator.
- Parameters
- ----------
- my_angle : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -298,15 +300,15 @@ def angle(self):
return self._angle
@property
- def unit_name(self):
- """Allows to connect unit_name input to the operator.
+ def unit_name(self) -> Input:
+ r"""Allows to connect unit_name input to the operator.
- String unit. supported values: "deg" or
- "rad". default: "rad".
+ String Unit. Supported values: "deg" or "rad". Default: "rad".
- Parameters
- ----------
- my_unit_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,12 +321,13 @@ def unit_name(self):
return self._unit_name
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -337,17 +340,15 @@ def abs_value(self):
return self._abs_value
@property
- def imaginary_part_null(self):
- """Allows to connect imaginary_part_null input to the operator.
+ def imaginary_part_null(self) -> Input:
+ r"""Allows to connect imaginary_part_null input to the operator.
- If the imaginary part field is empty and this
- pin is true, then the imaginary part
- is supposed to be 0 (default is
- false).
+ If the imaginary part field is empty and this pin is true, then the imaginary part is supposed to be 0 (default is false).
- Parameters
- ----------
- my_imaginary_part_null : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -378,18 +379,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sweeping_phase()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
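A minimal sketch of a 90-degree phase shift, assuming my_real_field and my_imag_field are existing fields (placeholders); only the pins shown are wired, the remaining pins follow the same connect pattern:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sweeping_phase()
>>> op.inputs.real_field.connect(my_real_field)       # placeholder real part
>>> op.inputs.imaginary_field.connect(my_imag_field)  # placeholder imaginary part
>>> op.inputs.angle.connect(90.0)
>>> op.inputs.unit_name.connect("deg")                # "deg" or "rad" (default "rad")
>>> result_field = op.outputs.field()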
diff --git a/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py b/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py
index 37c171ed299..5f8dd79c20d 100644
--- a/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py
+++ b/src/ansys/dpf/core/operators/math/sweeping_phase_fc.py
@@ -4,29 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class sweeping_phase_fc(Operator):
- """Shifts the phase of all the corresponding real and imaginary fields of
- a fields container for a given angle (in 2) of a unit (in 4).
+ r"""Shifts the phase of all the corresponding real and imaginary fields of a
+ fields container for a given angle (in 2) of a unit (in 4).
+
Parameters
----------
- fields_container : FieldsContainer
- angle : float
- unit_name : str, optional
- String unit. supported values: "deg" or
- "rad". default: "rad".
- abs_value : bool
+ fields_container: FieldsContainer
+ angle: float
+ unit_name: str, optional
+ String Unit. Supported values: "deg" or "rad". Default: "rad".
+ abs_value: bool
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -79,10 +83,10 @@ def __init__(
self.inputs.abs_value.connect(abs_value)
@staticmethod
- def _spec():
- description = """Shifts the phase of all the corresponding real and imaginary fields of
- a fields container for a given angle (in 2) of a unit (in
- 4)."""
+ def _spec() -> Specification:
+ description = r"""Shifts the phase of all the corresponding real and imaginary fields of a
+fields container for a given angle (in 2) of a unit (in 4).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -90,26 +94,25 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="angle",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="unit_name",
type_names=["string"],
optional=True,
- document="""String unit. supported values: "deg" or
- "rad". default: "rad".""",
+ document=r"""String Unit. Supported values: "deg" or "rad". Default: "rad".""",
),
4: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -117,14 +120,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -133,29 +136,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="sweeping_phase_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSweepingPhaseFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSweepingPhaseFc
+ inputs:
+ An instance of InputsSweepingPhaseFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSweepingPhaseFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSweepingPhaseFc
+ outputs:
+ An instance of OutputsSweepingPhaseFc.
"""
return super().outputs
@@ -192,12 +202,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._abs_value)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,12 +221,13 @@ def fields_container(self):
return self._fields_container
@property
- def angle(self):
- """Allows to connect angle input to the operator.
+ def angle(self) -> Input:
+ r"""Allows to connect angle input to the operator.
- Parameters
- ----------
- my_angle : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,15 +240,15 @@ def angle(self):
return self._angle
@property
- def unit_name(self):
- """Allows to connect unit_name input to the operator.
+ def unit_name(self) -> Input:
+ r"""Allows to connect unit_name input to the operator.
- String unit. supported values: "deg" or
- "rad". default: "rad".
+ String Unit. Supported values: "deg" or "rad". Default: "rad".
- Parameters
- ----------
- my_unit_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,12 +261,13 @@ def unit_name(self):
return self._unit_name
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -285,18 +298,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sweeping_phase_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
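A minimal sketch for the fields-container variant, assuming my_fields_container exists (placeholder); constructor keywords match the __init__ shown above:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.sweeping_phase_fc(
...     fields_container=my_fields_container,  # placeholder input
...     angle=30.0,
...     unit_name="deg",
...     abs_value=False,
... )
>>> result_fields_container = op.outputs.fields_container()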
diff --git a/src/ansys/dpf/core/operators/math/time_derivation.py b/src/ansys/dpf/core/operators/math/time_derivation.py
index 69e9328aca0..33014a7f239 100644
--- a/src/ansys/dpf/core/operators/math/time_derivation.py
+++ b/src/ansys/dpf/core/operators/math/time_derivation.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_derivation(Operator):
- """Derives a field of time varying quantities with respect to time
+ r"""Derives a field of time varying quantities with respect to time
+
Parameters
----------
- field : Field
- Field
- spline_fitting : bool, optional
- Uses spline fitting on the input field to
- compute smooth derivatives
+ field: Field
+ field
+ spline_fitting: bool, optional
+ Uses spline fitting on the input field to compute smooth derivatives
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -59,10 +63,9 @@ def __init__(self, field=None, spline_fitting=None, config=None, server=None):
self.inputs.spline_fitting.connect(spline_fitting)
@staticmethod
- def _spec():
- description = (
- """Derives a field of time varying quantities with respect to time"""
- )
+ def _spec() -> Specification:
+ description = r"""Derives a field of time varying quantities with respect to time
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +73,13 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field""",
+ document=r"""field""",
),
1: PinSpecification(
name="spline_fitting",
type_names=["bool"],
optional=True,
- document="""Uses spline fitting on the input field to
- compute smooth derivatives""",
+ document=r"""Uses spline fitting on the input field to compute smooth derivatives""",
),
},
map_output_pin_spec={
@@ -85,14 +87,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +103,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="TimeDerivation", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeDerivation:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeDerivation
+ inputs:
+ An instance of InputsTimeDerivation.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeDerivation:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeDerivation
+ outputs:
+ An instance of OutputsTimeDerivation.
"""
return super().outputs
@@ -150,14 +159,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._spline_fitting)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field
+ field
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,15 +180,15 @@ def field(self):
return self._field
@property
- def spline_fitting(self):
- """Allows to connect spline_fitting input to the operator.
+ def spline_fitting(self) -> Input:
+ r"""Allows to connect spline_fitting input to the operator.
- Uses spline fitting on the input field to
- compute smooth derivatives
+ Uses spline fitting on the input field to compute smooth derivatives
- Parameters
- ----------
- my_spline_fitting : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -209,18 +219,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_derivation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
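A minimal sketch, assuming my_field is an existing time-varying Field (placeholder); spline_fitting is optional:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_derivation(field=my_field, spline_fitting=True)  # placeholder input
>>> result_field = op.outputs.field()  # time derivative of the input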
diff --git a/src/ansys/dpf/core/operators/math/time_freq_interpolation.py b/src/ansys/dpf/core/operators/math/time_freq_interpolation.py
index 043433a0fcb..636d9e1b27a 100644
--- a/src/ansys/dpf/core/operators/math/time_freq_interpolation.py
+++ b/src/ansys/dpf/core/operators/math/time_freq_interpolation.py
@@ -4,48 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_freq_interpolation(Operator):
- """Interpolates between all the matching fields of a fields container at
+ r"""Interpolates between all the matching fields of a fields container at
given times or frequencies, using ramped: fieldOut =
- field1*(1.-fact)+field2*(fact), or stepped: fieldOut=field2. If
- the time freq is higher than the max available, the field at the
- max time freq is taken. Computes the output time freq support to
- support the fields container
+ field1\ *(1.-fact)+field2*\ (fact), or stepped: fieldOut=field2. If the
+ time freq is higher than the max available, the field at the max time
+ freq is taken. Computes the output time freq support to support the
+ fields container
+
Parameters
----------
- fields_container : FieldsContainer
- time_freq_values : float or Field
- List of frequencies or times needed. to
- specify load steps, put a field (and
- not a list) in input with a scoping
- located on "timefreq_steps".
- step : int, optional
- If a field is set as input, the step ids
- should be its scoping.
- interpolation_type : int, optional
+ fields_container: FieldsContainer
+ time_freq_values: float or Field
+ list of frequencies or times needed. To specify load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps".
+ step: int, optional
+ if a Field is set as input, the step ids should be its scoping.
+ interpolation_type: int, optional
1 is ramped, 2 is stepped, default is 1.
- force_new_time_freq_support : bool, optional
- If set to true, the output fields container
- will always have a new time freq
- support rescoped to the output
- time_freq_values (default is false).
- if set to false, the time freq
- support is only recreated when time
- or frequency values are between
- existing ones.
- time_freq_support : TimeFreqSupport, optional
+ force_new_time_freq_support: bool, optional
+ If set to true, the output fields container will always have a new time freq support rescoped to the output time_freq_values (default is false). If set to false, the time freq support is only recreated when time or frequency values are between existing ones.
+ time_freq_support: TimeFreqSupport, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -110,14 +104,14 @@ def __init__(
self.inputs.time_freq_support.connect(time_freq_support)
@staticmethod
- def _spec():
- description = """Interpolates between all the matching fields of a fields container at
- given times or frequencies, using ramped: fieldOut =
- field1*(1.-fact)+field2*(fact), or stepped:
- fieldOut=field2. If the time freq is higher than the max
- available, the field at the max time freq is taken.
- Computes the output time freq support to support the
- fields container"""
+ def _spec() -> Specification:
+ description = r"""Interpolates between all the matching fields of a fields container at
+given times or frequencies, using ramped: fieldOut =
+field1\ *(1.-fact)+field2*\ (fact), or stepped: fieldOut=field2. If the
+time freq is higher than the max available, the field at the max time
+freq is taken. Computes the output time freq support to support the
+fields container
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -125,48 +119,37 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="time_freq_values",
type_names=["double", "vector", "field"],
optional=False,
- document="""List of frequencies or times needed. to
- specify load steps, put a field (and
- not a list) in input with a scoping
- located on "timefreq_steps".""",
+ document=r"""list of frequencies or times needed. To specify load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps".""",
),
2: PinSpecification(
name="step",
type_names=["int32"],
optional=True,
- document="""If a field is set as input, the step ids
- should be its scoping.""",
+ document=r"""if a Field is set as input, the step ids should be its scoping.""",
),
3: PinSpecification(
name="interpolation_type",
type_names=["int32"],
optional=True,
- document="""1 is ramped, 2 is stepped, default is 1.""",
+ document=r"""1 is ramped, 2 is stepped, default is 1.""",
),
4: PinSpecification(
name="force_new_time_freq_support",
type_names=["bool"],
optional=True,
- document="""If set to true, the output fields container
- will always have a new time freq
- support rescoped to the output
- time_freq_values (default is false).
- if set to false, the time freq
- support is only recreated when time
- or frequency values are between
- existing ones.""",
+ document=r"""If set to true, the output fields container will always have a new time freq support rescoped to the output time_freq_values (default is false). If set to false, the time freq support is only recreated when time or frequency values are between existing ones.""",
),
8: PinSpecification(
name="time_freq_support",
type_names=["time_freq_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -174,14 +157,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -190,29 +173,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="time_freq_interpolation", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeFreqInterpolation:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeFreqInterpolation
+ inputs:
+ An instance of InputsTimeFreqInterpolation.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeFreqInterpolation:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeFreqInterpolation
+ outputs:
+ An instance of OutputsTimeFreqInterpolation.
"""
return super().outputs
@@ -265,12 +255,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._time_freq_support)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,17 +274,15 @@ def fields_container(self):
return self._fields_container
@property
- def time_freq_values(self):
- """Allows to connect time_freq_values input to the operator.
+ def time_freq_values(self) -> Input:
+ r"""Allows to connect time_freq_values input to the operator.
- List of frequencies or times needed. to
- specify load steps, put a field (and
- not a list) in input with a scoping
- located on "timefreq_steps".
+ list of frequencies or times needed. To specify load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps".
- Parameters
- ----------
- my_time_freq_values : float or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -306,15 +295,15 @@ def time_freq_values(self):
return self._time_freq_values
@property
- def step(self):
- """Allows to connect step input to the operator.
+ def step(self) -> Input:
+ r"""Allows to connect step input to the operator.
- If a field is set as input, the step ids
- should be its scoping.
+ if a Field is set as input, the step ids should be its scoping.
- Parameters
- ----------
- my_step : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -327,14 +316,15 @@ def step(self):
return self._step
@property
- def interpolation_type(self):
- """Allows to connect interpolation_type input to the operator.
+ def interpolation_type(self) -> Input:
+ r"""Allows to connect interpolation_type input to the operator.
1 is ramped, 2 is stepped, default is 1.
- Parameters
- ----------
- my_interpolation_type : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -347,21 +337,15 @@ def interpolation_type(self):
return self._interpolation_type
@property
- def force_new_time_freq_support(self):
- """Allows to connect force_new_time_freq_support input to the operator.
-
- If set to true, the output fields container
- will always have a new time freq
- support rescoped to the output
- time_freq_values (default is false).
- if set to false, the time freq
- support is only recreated when time
- or frequency values are between
- existing ones.
+ def force_new_time_freq_support(self) -> Input:
+ r"""Allows to connect force_new_time_freq_support input to the operator.
- Parameters
- ----------
- my_force_new_time_freq_support : bool
+ If set to true, the output fields container will always have a new time freq support rescoped to the output time_freq_values (default is false). If set to false, the time freq support is only recreated when time or frequency values are between existing ones.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -374,12 +358,13 @@ def force_new_time_freq_support(self):
return self._force_new_time_freq_support
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -412,18 +397,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_freq_interpolation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
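A minimal sketch interpolating a fields container at two times, assuming my_fields_container exists (placeholder) and that a plain Python list is accepted for the "vector" pin type:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_freq_interpolation()
>>> op.inputs.fields_container.connect(my_fields_container)  # placeholder input
>>> op.inputs.time_freq_values.connect([0.01, 0.02])         # times or frequencies to interpolate at
>>> op.inputs.interpolation_type.connect(1)                  # 1 = ramped, 2 = stepped
>>> result_fields_container = op.outputs.fields_container()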
diff --git a/src/ansys/dpf/core/operators/math/time_integration.py b/src/ansys/dpf/core/operators/math/time_integration.py
index ddfa3efc79d..e8b2bd7c0dc 100644
--- a/src/ansys/dpf/core/operators/math/time_integration.py
+++ b/src/ansys/dpf/core/operators/math/time_integration.py
@@ -4,32 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_integration(Operator):
- """Integrates a field of time varying quantities over time
+ r"""Integrates a field of time varying quantities over time
+
Parameters
----------
- field : Field
- Field
- resample_output : bool, optional
+ field: Field
+ field
+ resample_output: bool, optional
Resample the output
- absolute_error : float, optional
+ absolute_error: float, optional
Absolute error for the resampling
- minimum_step_size : float, optional
+ minimum_step_size: float, optional
Minimum time step size for the resampling
- integration_constant : float, optional
+ integration_constant: float, optional
Constant to be added to the integrated field
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -88,8 +93,9 @@ def __init__(
self.inputs.integration_constant.connect(integration_constant)
@staticmethod
- def _spec():
- description = """Integrates a field of time varying quantities over time"""
+ def _spec() -> Specification:
+ description = r"""Integrates a field of time varying quantities over time
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -97,31 +103,31 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field""",
+ document=r"""field""",
),
1: PinSpecification(
name="resample_output",
type_names=["bool"],
optional=True,
- document="""Resample the output""",
+ document=r"""Resample the output""",
),
2: PinSpecification(
name="absolute_error",
type_names=["double"],
optional=True,
- document="""Absolute error for the resampling""",
+ document=r"""Absolute error for the resampling""",
),
3: PinSpecification(
name="minimum_step_size",
type_names=["double"],
optional=True,
- document="""Minimum time step size for the resamplig""",
+ document=r"""Minimum time step size for the resampling""",
),
4: PinSpecification(
name="integration_constant",
type_names=["double"],
optional=True,
- document="""Constant to be added to the integrated field""",
+ document=r"""Constant to be added to the integrated field""",
),
},
map_output_pin_spec={
@@ -129,14 +135,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -145,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="TimeIntegration", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeIntegration:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeIntegration
+ inputs:
+ An instance of InputsTimeIntegration.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeIntegration:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeIntegration
+ outputs:
+ An instance of OutputsTimeIntegration.
"""
return super().outputs
@@ -210,14 +223,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._integration_constant)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field
+ field
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -230,14 +244,15 @@ def field(self):
return self._field
@property
- def resample_output(self):
- """Allows to connect resample_output input to the operator.
+ def resample_output(self) -> Input:
+ r"""Allows to connect resample_output input to the operator.
Resample the output
- Parameters
- ----------
- my_resample_output : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -250,14 +265,15 @@ def resample_output(self):
return self._resample_output
@property
- def absolute_error(self):
- """Allows to connect absolute_error input to the operator.
+ def absolute_error(self) -> Input:
+ r"""Allows to connect absolute_error input to the operator.
Absolute error for the resampling
- Parameters
- ----------
- my_absolute_error : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,14 +286,15 @@ def absolute_error(self):
return self._absolute_error
@property
- def minimum_step_size(self):
- """Allows to connect minimum_step_size input to the operator.
+ def minimum_step_size(self) -> Input:
+ r"""Allows to connect minimum_step_size input to the operator.
Minimum time step size for the resampling
- Parameters
- ----------
- my_minimum_step_size : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -290,14 +307,15 @@ def minimum_step_size(self):
return self._minimum_step_size
@property
- def integration_constant(self):
- """Allows to connect integration_constant input to the operator.
+ def integration_constant(self) -> Input:
+ r"""Allows to connect integration_constant input to the operator.
Constant to be added to the integrated field
- Parameters
- ----------
- my_integration_constant : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -328,18 +346,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_integration()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
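A minimal sketch, assuming my_field is an existing time-varying Field (placeholder); the other optional pins keep their defaults:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.time_integration(field=my_field)  # placeholder input
>>> op.inputs.integration_constant.connect(0.0)               # optional constant added to the integrated field
>>> result_field = op.outputs.field()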
diff --git a/src/ansys/dpf/core/operators/math/unit_convert.py b/src/ansys/dpf/core/operators/math/unit_convert.py
index 1c1c2b0621e..b9014c6e310 100644
--- a/src/ansys/dpf/core/operators/math/unit_convert.py
+++ b/src/ansys/dpf/core/operators/math/unit_convert.py
@@ -4,32 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class unit_convert(Operator):
- """Converts an input field/fields container or mesh of a given unit to
+ r"""Converts an input field/fields container or mesh of a given unit to
another unit.
+
Parameters
----------
- entity_to_convert : Field or FieldsContainer or MeshedRegion or MeshesContainer
- unit_name : str or int or Field
- Unit as a string, ex 'm' for meter, 'pa' for
- pascal,... or ansys unit system's id,
- or a field from which expected unit
- will be extracted.
+ entity_to_convert: Field or FieldsContainer or MeshedRegion or MeshesContainer
+ unit_name: str or int or Field
+ unit as a string, ex 'm' for meter, 'Pa' for pascal,... Or ansys unit system's ID, or a field from which expected unit will be extracted.
Returns
-------
- converted_entity : Field or FieldsContainer or MeshedRegion or MeshesContainer
- The output entity is the same as the input
- (inplace operator)
+ converted_entity: Field or FieldsContainer or MeshedRegion or MeshesContainer
+ the output entity is the same as the input (inplace operator)
Examples
--------
@@ -66,9 +67,10 @@ def __init__(
self.inputs.unit_name.connect(unit_name)
@staticmethod
- def _spec():
- description = """Converts an input field/fields container or mesh of a given unit to
- another unit."""
+ def _spec() -> Specification:
+ description = r"""Converts an input field/fields container or mesh of a given unit to
+another unit.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,16 +83,13 @@ def _spec():
"meshes_container",
],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="unit_name",
type_names=["string", "int32", "field"],
optional=False,
- document="""Unit as a string, ex 'm' for meter, 'pa' for
- pascal,... or ansys unit system's id,
- or a field from which expected unit
- will be extracted.""",
+ document=r"""unit as a string, ex 'm' for meter, 'Pa' for pascal,... Or ansys unit system's ID, or a field from which expected unit will be extracted.""",
),
},
map_output_pin_spec={
@@ -103,15 +102,14 @@ def _spec():
"meshes_container",
],
optional=False,
- document="""The output entity is the same as the input
- (inplace operator)""",
+ document=r"""the output entity is the same as the input (inplace operator)""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -120,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="unit_convert", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsUnitConvert:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsUnitConvert
+ inputs:
+ An instance of InputsUnitConvert.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsUnitConvert:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsUnitConvert
+ outputs:
+ An instance of OutputsUnitConvert.
"""
return super().outputs
@@ -169,12 +174,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_name)
@property
- def entity_to_convert(self):
- """Allows to connect entity_to_convert input to the operator.
+ def entity_to_convert(self) -> Input:
+ r"""Allows to connect entity_to_convert input to the operator.
- Parameters
- ----------
- my_entity_to_convert : Field or FieldsContainer or MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -187,17 +193,15 @@ def entity_to_convert(self):
return self._entity_to_convert
@property
- def unit_name(self):
- """Allows to connect unit_name input to the operator.
+ def unit_name(self) -> Input:
+ r"""Allows to connect unit_name input to the operator.
- Unit as a string, ex 'm' for meter, 'pa' for
- pascal,... or ansys unit system's id,
- or a field from which expected unit
- will be extracted.
+ unit as a string, ex 'm' for meter, 'Pa' for pascal,... Or ansys unit system's ID, or a field from which expected unit will be extracted.
- Parameters
- ----------
- my_unit_name : str or int or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
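A minimal sketch, assuming my_field is an existing Field in meters (placeholder); eval() is the generic Operator evaluation call returning the first output, used here instead of the typed converted_entity accessors:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.unit_convert()
>>> op.inputs.entity_to_convert.connect(my_field)  # placeholder Field/FieldsContainer/mesh
>>> op.inputs.unit_name.connect("mm")              # target unit as a string
>>> converted = op.eval()                          # evaluates the operator and returns the converted entity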
diff --git a/src/ansys/dpf/core/operators/math/unit_convert_fc.py b/src/ansys/dpf/core/operators/math/unit_convert_fc.py
index 74f21979e97..0b0c1e5bbad 100644
--- a/src/ansys/dpf/core/operators/math/unit_convert_fc.py
+++ b/src/ansys/dpf/core/operators/math/unit_convert_fc.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class unit_convert_fc(Operator):
- """Converts an input fields container of a given unit to another unit.
+ r"""Converts an input fields container of a given unit to another unit.
+
Parameters
----------
- fields_container : FieldsContainer
- unit_name : str
- Unit as a string, ex 'm' for meter, 'pa' for
- pascal,...
+ fields_container: FieldsContainer
+ unit_name: str
+ unit as a string, ex 'm' for meter, 'Pa' for pascal,...
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -58,10 +62,9 @@ def __init__(self, fields_container=None, unit_name=None, config=None, server=No
self.inputs.unit_name.connect(unit_name)
@staticmethod
- def _spec():
- description = (
- """Converts an input fields container of a given unit to another unit."""
- )
+ def _spec() -> Specification:
+ description = r"""Converts an input fields container of a given unit to another unit.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,14 +72,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="unit_name",
type_names=["string"],
optional=False,
- document="""Unit as a string, ex 'm' for meter, 'pa' for
- pascal,...""",
+ document=r"""unit as a string, ex 'm' for meter, 'Pa' for pascal,...""",
),
},
map_output_pin_spec={
@@ -84,14 +86,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -100,29 +102,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="unit_convert_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsUnitConvertFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsUnitConvertFc
+ inputs:
+ An instance of InputsUnitConvertFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsUnitConvertFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsUnitConvertFc
+ outputs:
+ An instance of OutputsUnitConvertFc.
"""
return super().outputs
@@ -149,12 +158,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_name)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -167,15 +177,15 @@ def fields_container(self):
return self._fields_container
@property
- def unit_name(self):
- """Allows to connect unit_name input to the operator.
+ def unit_name(self) -> Input:
+ r"""Allows to connect unit_name input to the operator.
- Unit as a string, ex 'm' for meter, 'pa' for
- pascal,...
+ Unit as a string, e.g. 'm' for meter or 'Pa' for pascal.
- Parameters
- ----------
- my_unit_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,18 +216,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.unit_convert_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
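A matching sketch for the fields-container variant shown in this hunk; the one-field container and the 'MPa' target unit are illustrative assumptions:
>>> from ansys.dpf import core as dpf
>>> f = dpf.fields_factory.field_from_array([1.0, 2.0, 3.0])
>>> f.unit = "Pa"
>>> fc = dpf.fields_container_factory.over_time_freq_fields_container([f])
>>> op = dpf.operators.math.unit_convert_fc(fields_container=fc, unit_name="MPa")
>>> converted_fc = op.outputs.fields_container()  # each field now carries the target unit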
diff --git a/src/ansys/dpf/core/operators/math/window_bartlett.py b/src/ansys/dpf/core/operators/math/window_bartlett.py
index 7a90b82f863..5113010f61e 100644
--- a/src/ansys/dpf/core/operators/math/window_bartlett.py
+++ b/src/ansys/dpf/core/operators/math/window_bartlett.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_bartlett(Operator):
- """Apply bartlett windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply bartlett windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply bartlett windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply bartlett windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::bartlett", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowBartlett:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowBartlett
+ inputs:
+ An instance of InputsWindowBartlett.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowBartlett:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowBartlett
+ outputs:
+ An instance of OutputsWindowBartlett.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
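A short sketch of the Bartlett window operator updated here; transient_field is a hypothetical time-located Field with evenly spaced steps (not constructed in this sketch), and eval() is used as a generic shortcut:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_bartlett()
>>> op.inputs.field.connect(transient_field)  # hypothetical time-located Field
>>> windowed = op.eval()  # Field or FieldsContainer with the Bartlett window applied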
diff --git a/src/ansys/dpf/core/operators/math/window_bartlett_fc.py b/src/ansys/dpf/core/operators/math/window_bartlett_fc.py
index 6f2594f9bc9..487f5fad608 100644
--- a/src/ansys/dpf/core/operators/math/window_bartlett_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_bartlett_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_bartlett_fc(Operator):
- """Apply bartlett windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply bartlett windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply bartlett windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply bartlett windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::bartlett_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowBartlettFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowBartlettFc
+ inputs:
+ An instance of InputsWindowBartlettFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowBartlettFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowBartlettFc
+ outputs:
+ An instance of OutputsWindowBartlettFc.
"""
return super().outputs
@@ -135,12 +147,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,18 +184,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_bartlett_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
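The fields-container variant follows the same pattern; transient_fc is a hypothetical FieldsContainer labeled over time with evenly spaced steps:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_bartlett_fc()
>>> op.inputs.fields_container.connect(transient_fc)  # hypothetical time-labeled container
>>> windowed_fc = op.outputs.fields_container()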
diff --git a/src/ansys/dpf/core/operators/math/window_blackman.py b/src/ansys/dpf/core/operators/math/window_blackman.py
index d0f12aa4580..7b886a2ede2 100644
--- a/src/ansys/dpf/core/operators/math/window_blackman.py
+++ b/src/ansys/dpf/core/operators/math/window_blackman.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_blackman(Operator):
- """Apply blackman windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply blackman windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply blackman windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply blackman windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::blackman", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowBlackman:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowBlackman
+ inputs:
+ An instance of InputsWindowBlackman.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowBlackman:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowBlackman
+ outputs:
+ An instance of OutputsWindowBlackman.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
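The default_config typing added above can be exercised as follows; inspecting the available options is only an illustration of how a Config can be examined before being passed back to the operator:
>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.math.window_blackman.default_config()
>>> print(config.available_config_options)  # list the tunable options
>>> op = dpf.operators.math.window_blackman(config=config)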
diff --git a/src/ansys/dpf/core/operators/math/window_blackman_fc.py b/src/ansys/dpf/core/operators/math/window_blackman_fc.py
index d9fb7f457f5..4c3e365b69f 100644
--- a/src/ansys/dpf/core/operators/math/window_blackman_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_blackman_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_blackman_fc(Operator):
- """Apply blackman windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply blackman windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply blackman windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply blackman windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::blackman_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowBlackmanFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowBlackmanFc
+ inputs:
+ An instance of InputsWindowBlackmanFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowBlackmanFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowBlackmanFc
+ outputs:
+ An instance of OutputsWindowBlackmanFc.
"""
return super().outputs
@@ -135,12 +147,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,18 +184,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_blackman_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/window_hamming.py b/src/ansys/dpf/core/operators/math/window_hamming.py
index 24da2dbf6d1..a1ce7a6699f 100644
--- a/src/ansys/dpf/core/operators/math/window_hamming.py
+++ b/src/ansys/dpf/core/operators/math/window_hamming.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_hamming(Operator):
- """Apply hamming windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply hamming windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply hamming windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply hamming windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::hamming", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowHamming:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowHamming
+ inputs:
+ An instance of InputsWindowHamming.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowHamming:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowHamming
+ outputs:
+ An instance of OutputsWindowHamming.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
diff --git a/src/ansys/dpf/core/operators/math/window_hamming_fc.py b/src/ansys/dpf/core/operators/math/window_hamming_fc.py
index e095b25d2e5..05567570bb7 100644
--- a/src/ansys/dpf/core/operators/math/window_hamming_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_hamming_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_hamming_fc(Operator):
- """Apply hamming windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply hamming windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply hamming windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply hamming windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::hamming_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowHammingFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowHammingFc
+ inputs:
+ An instance of InputsWindowHammingFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowHammingFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowHammingFc
+ outputs:
+ An instance of OutputsWindowHammingFc.
"""
return super().outputs
@@ -135,12 +147,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,18 +184,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_hamming_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
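Operator pins can also be chained, which is how these autogenerated classes are typically combined; transient_fc is again a hypothetical time-labeled FieldsContainer:
>>> from ansys.dpf import core as dpf
>>> convert = dpf.operators.math.unit_convert_fc(fields_container=transient_fc, unit_name="m")
>>> window = dpf.operators.math.window_hamming_fc()
>>> window.inputs.fields_container.connect(convert.outputs.fields_container)  # pin-to-pin connection
>>> result = window.outputs.fields_container()  # evaluates the chain on request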
diff --git a/src/ansys/dpf/core/operators/math/window_hanning.py b/src/ansys/dpf/core/operators/math/window_hanning.py
index af6a2a900f7..55016ce1b4e 100644
--- a/src/ansys/dpf/core/operators/math/window_hanning.py
+++ b/src/ansys/dpf/core/operators/math/window_hanning.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_hanning(Operator):
- """Apply hanning windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply hanning windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply hanning windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply hanning windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::hanning", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowHanning:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowHanning
+ inputs:
+ An instance of InputsWindowHanning.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowHanning:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowHanning
+ outputs:
+ An instance of OutputsWindowHanning.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
diff --git a/src/ansys/dpf/core/operators/math/window_hanning_fc.py b/src/ansys/dpf/core/operators/math/window_hanning_fc.py
index a8811afabbb..8814690f4d1 100644
--- a/src/ansys/dpf/core/operators/math/window_hanning_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_hanning_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_hanning_fc(Operator):
- """Apply hanning windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply hanning windowing on a given FieldsContainer having time label or
+ a Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply hanning windowing on a given FieldsContainer having time label
- or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply hanning windowing on a given FieldsContainer having time label or
+a Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::hanning_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowHanningFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowHanningFc
+ inputs:
+ An instance of InputsWindowHanningFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowHanningFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowHanningFc
+ outputs:
+ An instance of OutputsWindowHanningFc.
"""
return super().outputs
@@ -135,12 +147,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,18 +184,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_hanning_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/window_triangular.py b/src/ansys/dpf/core/operators/math/window_triangular.py
index c9c03dba629..5ae3ca5849b 100644
--- a/src/ansys/dpf/core/operators/math/window_triangular.py
+++ b/src/ansys/dpf/core/operators/math/window_triangular.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_triangular(Operator):
- """Apply triangular windowing on a given FieldsContainer having time
- label or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before otherwise).
+ r"""Apply triangular windowing on a given FieldsContainer having time label
+ or a Field located on time. Assume that time sampling is evenly spaced
+ (use time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply triangular windowing on a given FieldsContainer having time
- label or a Field located on time. Assume that time
- sampling is evenly spaced (use time_freq_interpolation
- before otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply triangular windowing on a given FieldsContainer having time label
+or a Field located on time. Assume that time sampling is evenly spaced
+(use time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::triangular", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowTriangular:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowTriangular
+ inputs:
+ An instance of InputsWindowTriangular.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowTriangular:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowTriangular
+ outputs:
+ An instance of OutputsWindowTriangular.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
diff --git a/src/ansys/dpf/core/operators/math/window_triangular_fc.py b/src/ansys/dpf/core/operators/math/window_triangular_fc.py
index 49e0695c769..7dbc57b16fa 100644
--- a/src/ansys/dpf/core/operators/math/window_triangular_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_triangular_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_triangular_fc(Operator):
- """Apply triangular windowing on a given FieldsContainer having time
- label or a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before otherwise).
+ r"""Apply triangular windowing on a given FieldsContainer having time label
+ or a Field located on time. Assume that time sampling is evenly spaced
+ (use time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply triangular windowing on a given FieldsContainer having time
- label or a Field located on time. Assume that time
- sampling is evenly spaced (use time_freq_interpolation
- before otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply triangular windowing on a given FieldsContainer having time label
+or a Field located on time. Assume that time sampling is evenly spaced
+(use time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::triangular_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowTriangularFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowTriangularFc
+ inputs:
+ An instance of InputsWindowTriangularFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowTriangularFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowTriangularFc
+ outputs:
+ An instance of OutputsWindowTriangularFc.
"""
return super().outputs
@@ -135,12 +147,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,18 +186,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_triangular_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/math/window_welch.py b/src/ansys/dpf/core/operators/math/window_welch.py
index 1559a2d9db8..3eda14436f8 100644
--- a/src/ansys/dpf/core/operators/math/window_welch.py
+++ b/src/ansys/dpf/core/operators/math/window_welch.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_welch(Operator):
- """Apply welch windowing on a given FieldsContainer having time label or
- a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply welch windowing on a given FieldsContainer having time label or a
+ Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Returns
-------
- field : Field or FieldsContainer
+ field: Field or FieldsContainer
Examples
--------
@@ -53,11 +58,11 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Apply welch windowing on a given FieldsContainer having time label or
- a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply welch windowing on a given FieldsContainer having time label or a
+Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,14 +78,14 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -89,29 +94,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::welch", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowWelch:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowWelch
+ inputs:
+ An instance of InputsWindowWelch.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowWelch:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowWelch
+ outputs:
+ An instance of OutputsWindowWelch.
"""
return super().outputs
@@ -134,12 +146,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
diff --git a/src/ansys/dpf/core/operators/math/window_welch_fc.py b/src/ansys/dpf/core/operators/math/window_welch_fc.py
index b9e45a54c42..90e13384fbc 100644
--- a/src/ansys/dpf/core/operators/math/window_welch_fc.py
+++ b/src/ansys/dpf/core/operators/math/window_welch_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class window_welch_fc(Operator):
- """Apply welch windowing on a given FieldsContainer having time label or
- a Field located on time. Assume that time sampling is evenly
- spaced (use time_freq_interpolation before otherwise).
+ r"""Apply welch windowing on a given FieldsContainer having time label or a
+ Field located on time. Assume that time sampling is evenly spaced (use
+ time_freq_interpolation before otherwise).
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -52,11 +57,11 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Apply welch windowing on a given FieldsContainer having time label or
- a Field located on time. Assume that time sampling is
- evenly spaced (use time_freq_interpolation before
- otherwise)."""
+ def _spec() -> Specification:
+ description = r"""Apply welch windowing on a given FieldsContainer having time label or a
+Field located on time. Assume that time sampling is evenly spaced (use
+time_freq_interpolation before otherwise).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -64,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -72,14 +77,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -88,29 +93,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="window::welch_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWindowWelchFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWindowWelchFc
+ inputs:
+ An instance of InputsWindowWelchFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWindowWelchFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWindowWelchFc
+ outputs:
+ An instance of OutputsWindowWelchFc.
"""
return super().outputs
@@ -133,12 +145,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -169,18 +182,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.window_welch_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
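The constructor keywords exposed by these classes also allow a compact one-liner; transient_fc remains a hypothetical FieldsContainer and eval() a generic shortcut:
>>> from ansys.dpf import core as dpf
>>> windowed = dpf.operators.math.window_welch_fc(fields_container=transient_fc).eval()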
diff --git a/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py b/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py
index a35cb2be0ba..641dfafd3e3 100644
--- a/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py
+++ b/src/ansys/dpf/core/operators/mesh/acmo_mesh_provider.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class acmo_mesh_provider(Operator):
- """Converts an Assembly Mesh into a DPF Meshes container
+ r"""Converts an Assembly Mesh into a DPF Meshes container
+
Parameters
----------
- assembly_mesh : AnsDispatchHolder or Struct Iansdispatch
- unit : str, optional
+ assembly_mesh: AnsDispatchHolder or Struct Iansdispatch
+ unit: str, optional
Returns
-------
- meshes_container : MeshesContainer
+ meshes_container: MeshesContainer
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, assembly_mesh=None, unit=None, config=None, server=None):
self.inputs.unit.connect(unit)
@staticmethod
- def _spec():
- description = """Converts an Assembly Mesh into a DPF Meshes container"""
+ def _spec() -> Specification:
+ description = r"""Converts an Assembly Mesh into a DPF Meshes container
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="assembly_mesh",
type_names=["ans_dispatch_holder", "struct IAnsDispatch"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="unit",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="meshes_container",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="acmo_mesh_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAcmoMeshProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAcmoMeshProvider
+ inputs:
+ An instance of InputsAcmoMeshProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAcmoMeshProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAcmoMeshProvider
+ outputs:
+ An instance of OutputsAcmoMeshProvider.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit)
@property
- def assembly_mesh(self):
- """Allows to connect assembly_mesh input to the operator.
+ def assembly_mesh(self) -> Input:
+ r"""Allows to connect assembly_mesh input to the operator.
- Parameters
- ----------
- my_assembly_mesh : AnsDispatchHolder or Struct Iansdispatch
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def assembly_mesh(self):
return self._assembly_mesh
@property
- def unit(self):
- """Allows to connect unit input to the operator.
+ def unit(self) -> Input:
+ r"""Allows to connect unit input to the operator.
- Parameters
- ----------
- my_unit : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._meshes_container)
@property
- def meshes_container(self):
- """Allows to get meshes_container output of the operator
+ def meshes_container(self) -> Output:
+ r"""Allows to get meshes_container output of the operator
Returns
- ----------
- my_meshes_container : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.acmo_mesh_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshes_container = op.outputs.meshes_container()
- """ # noqa: E501
+ """
return self._meshes_container
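A hedged sketch of the acmo_mesh_provider pins documented above. The assembly_mesh pin expects an AnsDispatchHolder handed over by the embedding application (for example Mechanical); my_assembly_mesh is assumed to exist and cannot be built by this snippet on its own, and the unit value is an arbitrary illustration.

from ansys.dpf import core as dpf

op = dpf.operators.mesh.acmo_mesh_provider()
op.inputs.assembly_mesh.connect(my_assembly_mesh)  # assumed: provided by the host application
op.inputs.unit.connect("mm")                       # optional unit pin, illustrative value
meshes = op.outputs.meshes_container()             # resulting MeshesContainer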
diff --git a/src/ansys/dpf/core/operators/mesh/beam_properties.py b/src/ansys/dpf/core/operators/mesh/beam_properties.py
index aae7c336f6b..e58f5798ec6 100644
--- a/src/ansys/dpf/core/operators/mesh/beam_properties.py
+++ b/src/ansys/dpf/core/operators/mesh/beam_properties.py
@@ -4,71 +4,58 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_properties(Operator):
- """Reads the beam's properties from the result files contained in the
+ r"""Reads the beam’s properties from the result files contained in the
streams or data sources.
+
Parameters
----------
- streams : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data.
- data_sources : DataSources
- Result file path container, used if no
- streams are set.
+ streams: StreamsContainer, optional
+ Result file container allowed to be kept open to cache data.
+ data_sources: DataSources
+ Result file path container, used if no streams are set.
Returns
-------
- mesh_out : MeshedRegion
- This mesh updates a new map containing a
- field of the beam's properties if
- there is at least one beam in mesh.
- field_type_section_id : Field
- This field contains the section id of beams.
- 1:rec; 3:csolid, 4:ctube, 5:chan,
- 6:z, 7:l, 8:i, 9:t, 11:hats, 12:hrec.
- field_area : Field
+ mesh_out: MeshedRegion
+ This mesh updates a new map containing a field of the beam's properties if there is at least one beam in mesh.
+ field_type_section_id: Field
+ This field contains the section ID of beams. 1:REC; 3:CSOLID, 4:CTUBE, 5:CHAN, 6:Z, 7:L, 8:I, 9:T, 11:HATS, 12:HREC.
+ field_area: Field
This field contains the area of beams.
- field_moment_inertia : Field
- This field contains the inertia moment of
- beams. iyy, iyz, izz.
- field_geometry : Field
- This field contains the geometry of beams.
- rec:b,h. csolid:ri. ctube:ri, re.
- chan:w1,w2,w3,t1,t2,t3.
- z:w1,w2,w3,t1,t2,t3. l:w1,w2,t1,t2.
- i:w1,w2,w3,t1,t2,t3. t:w1,w2,t1,t2.
- hats: w1,w2,w3,w4,t1,t2,t3,t4.
- hrec:w1,w2,t1,t2,t3,t4.
- field_young_modulus : Field
- This field contains the young's modulus of
- beams.
- field_poisson_ratio : Field
- This field contains the poisson's ratio of
- beams.
- field_shear_modulus : Field
- This field contains the shear modulus of
- beams.
- field_beam_length : Field
+ field_moment_inertia: Field
+ This field contains the inertia moment of beams. Iyy, Iyz, Izz.
+ field_geometry: Field
+ This field contains the geometry of beams. REC:b,h. CSOLID:Ri. CTUBE:Ri, Re. CHAN:w1,w2,w3,t1,t2,t3. Z:w1,w2,w3,t1,t2,t3. L:w1,w2,t1,t2. I:w1,w2,w3,t1,t2,t3. T:w1,w2,t1,t2. HATS: w1,w2,w3,w4,t1,t2,t3,t4. HREC:w1,w2,t1,t2,t3,t4.
+ field_young_modulus: Field
+ This field contains the Young's modulus of beams.
+ field_poisson_ratio: Field
+ This field contains the Poisson's ratio of beams.
+ field_shear_modulus: Field
+ This field contains the Shear Modulus of beams.
+ field_beam_length: Field
This field contains the length of beams.
- field_torsion_constant : Field
- This field contains the torsion constant of
- beams.
- field_warping_constant : Field
- This field contains the warping constant of
- beams.
- field_offset_type : Field
+ field_torsion_constant: Field
+ This field contains the Torsion Constant of beams.
+ field_warping_constant: Field
+ This field contains the Warping Constant of beams.
+ field_offset_type: Field
This field contains offset type of beams.
- field_offset_y : Field
+ field_offset_y: Field
This field contains offset y of beams.
- field_offset_z : Field
+ field_offset_z: Field
This field contains offset z of beams.
Examples
@@ -117,9 +104,10 @@ def __init__(self, streams=None, data_sources=None, config=None, server=None):
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the beam's properties from the result files contained in the
- streams or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads the beam’s properties from the result files contained in the
+streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -127,15 +115,13 @@ def _spec():
name="streams",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data.""",
+ document=r"""Result file container allowed to be kept open to cache data.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set.""",
+ document=r"""Result file path container, used if no streams are set.""",
),
},
map_output_pin_spec={
@@ -143,108 +129,92 @@ def _spec():
name="mesh_out",
type_names=["abstract_meshed_region"],
optional=False,
- document="""This mesh updates a new map containing a
- field of the beam's properties if
- there is at least one beam in mesh.""",
+ document=r"""This mesh updates a new map containing a field of the beam's properties if there is at least one beam in mesh.""",
),
1: PinSpecification(
name="field_type_section_id",
type_names=["field"],
optional=False,
- document="""This field contains the section id of beams.
- 1:rec; 3:csolid, 4:ctube, 5:chan,
- 6:z, 7:l, 8:i, 9:t, 11:hats, 12:hrec.""",
+ document=r"""This field contains the section ID of beams. 1:REC; 3:CSOLID, 4:CTUBE, 5:CHAN, 6:Z, 7:L, 8:I, 9:T, 11:HATS, 12:HREC.""",
),
2: PinSpecification(
name="field_area",
type_names=["field"],
optional=False,
- document="""This field contains the area of beams.""",
+ document=r"""This field contains the area of beams.""",
),
3: PinSpecification(
name="field_moment_inertia",
type_names=["field"],
optional=False,
- document="""This field contains the inertia moment of
- beams. iyy, iyz, izz.""",
+ document=r"""This field contains the inertia moment of beams. Iyy, Iyz, Izz.""",
),
4: PinSpecification(
name="field_geometry",
type_names=["field"],
optional=False,
- document="""This field contains the geometry of beams.
- rec:b,h. csolid:ri. ctube:ri, re.
- chan:w1,w2,w3,t1,t2,t3.
- z:w1,w2,w3,t1,t2,t3. l:w1,w2,t1,t2.
- i:w1,w2,w3,t1,t2,t3. t:w1,w2,t1,t2.
- hats: w1,w2,w3,w4,t1,t2,t3,t4.
- hrec:w1,w2,t1,t2,t3,t4.""",
+ document=r"""This field contains the geometry of beams. REC:b,h. CSOLID:Ri. CTUBE:Ri, Re. CHAN:w1,w2,w3,t1,t2,t3. Z:w1,w2,w3,t1,t2,t3. L:w1,w2,t1,t2. I:w1,w2,w3,t1,t2,t3. T:w1,w2,t1,t2. HATS: w1,w2,w3,w4,t1,t2,t3,t4. HREC:w1,w2,t1,t2,t3,t4.""",
),
5: PinSpecification(
name="field_young_modulus",
type_names=["field"],
optional=False,
- document="""This field contains the young's modulus of
- beams.""",
+ document=r"""This field contains the Young's modulus of beams.""",
),
6: PinSpecification(
name="field_poisson_ratio",
type_names=["field"],
optional=False,
- document="""This field contains the poisson's ratio of
- beams.""",
+ document=r"""This field contains the Poisson's ratio of beams.""",
),
7: PinSpecification(
name="field_shear_modulus",
type_names=["field"],
optional=False,
- document="""This field contains the shear modulus of
- beams.""",
+ document=r"""This field contains the Shear Modulus of beams.""",
),
8: PinSpecification(
name="field_beam_length",
type_names=["field"],
optional=False,
- document="""This field contains the length of beams.""",
+ document=r"""This field contains the length of beams.""",
),
9: PinSpecification(
name="field_torsion_constant",
type_names=["field"],
optional=False,
- document="""This field contains the torsion constant of
- beams.""",
+ document=r"""This field contains the Torsion Constant of beams.""",
),
10: PinSpecification(
name="field_warping_constant",
type_names=["field"],
optional=False,
- document="""This field contains the warping constant of
- beams.""",
+ document=r"""This field contains the Warping Constant of beams.""",
),
11: PinSpecification(
name="field_offset_type",
type_names=["field"],
optional=False,
- document="""This field contains offset type of beams.""",
+ document=r"""This field contains offset type of beams.""",
),
12: PinSpecification(
name="field_offset_y",
type_names=["field"],
optional=False,
- document="""This field contains offset y of beams.""",
+ document=r"""This field contains offset y of beams.""",
),
13: PinSpecification(
name="field_offset_z",
type_names=["field"],
optional=False,
- document="""This field contains offset z of beams.""",
+ document=r"""This field contains offset z of beams.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -253,29 +223,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="beam_properties", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamProperties:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamProperties
+ inputs:
+ An instance of InputsBeamProperties.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamProperties:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamProperties
+ outputs:
+ An instance of OutputsBeamProperties.
"""
return super().outputs
@@ -302,15 +279,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
- Result file container allowed to be kept open
- to cache data.
+ Result file container allowed to be kept open to cache data.
- Parameters
- ----------
- my_streams : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,15 +300,15 @@ def streams(self):
return self._streams
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set.
+ Result file path container, used if no streams are set.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -409,239 +386,281 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_offset_z)
@property
- def mesh_out(self):
- """Allows to get mesh_out output of the operator
+ def mesh_out(self) -> Output:
+ r"""Allows to get mesh_out output of the operator
+
+ This mesh updates a new map containing a field of the beam's properties if there is at least one beam in mesh.
Returns
- ----------
- my_mesh_out : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh_out = op.outputs.mesh_out()
- """ # noqa: E501
+ """
return self._mesh_out
@property
- def field_type_section_id(self):
- """Allows to get field_type_section_id output of the operator
+ def field_type_section_id(self) -> Output:
+ r"""Allows to get field_type_section_id output of the operator
+
+ This field contains the section ID of beams. 1:REC; 3:CSOLID, 4:CTUBE, 5:CHAN, 6:Z, 7:L, 8:I, 9:T, 11:HATS, 12:HREC.
Returns
- ----------
- my_field_type_section_id : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_type_section_id = op.outputs.field_type_section_id()
- """ # noqa: E501
+ """
return self._field_type_section_id
@property
- def field_area(self):
- """Allows to get field_area output of the operator
+ def field_area(self) -> Output:
+ r"""Allows to get field_area output of the operator
+
+ This field contains the area of beams.
Returns
- ----------
- my_field_area : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_area = op.outputs.field_area()
- """ # noqa: E501
+ """
return self._field_area
@property
- def field_moment_inertia(self):
- """Allows to get field_moment_inertia output of the operator
+ def field_moment_inertia(self) -> Output:
+ r"""Allows to get field_moment_inertia output of the operator
+
+ This field contains the inertia moment of beams. Iyy, Iyz, Izz.
Returns
- ----------
- my_field_moment_inertia : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_moment_inertia = op.outputs.field_moment_inertia()
- """ # noqa: E501
+ """
return self._field_moment_inertia
@property
- def field_geometry(self):
- """Allows to get field_geometry output of the operator
+ def field_geometry(self) -> Output:
+ r"""Allows to get field_geometry output of the operator
+
+ This field contains the geometry of beams. REC:b,h. CSOLID:Ri. CTUBE:Ri, Re. CHAN:w1,w2,w3,t1,t2,t3. Z:w1,w2,w3,t1,t2,t3. L:w1,w2,t1,t2. I:w1,w2,w3,t1,t2,t3. T:w1,w2,t1,t2. HATS: w1,w2,w3,w4,t1,t2,t3,t4. HREC:w1,w2,t1,t2,t3,t4.
Returns
- ----------
- my_field_geometry : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_geometry = op.outputs.field_geometry()
- """ # noqa: E501
+ """
return self._field_geometry
@property
- def field_young_modulus(self):
- """Allows to get field_young_modulus output of the operator
+ def field_young_modulus(self) -> Output:
+ r"""Allows to get field_young_modulus output of the operator
+
+ This field contains the Young's modulus of beams.
Returns
- ----------
- my_field_young_modulus : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_young_modulus = op.outputs.field_young_modulus()
- """ # noqa: E501
+ """
return self._field_young_modulus
@property
- def field_poisson_ratio(self):
- """Allows to get field_poisson_ratio output of the operator
+ def field_poisson_ratio(self) -> Output:
+ r"""Allows to get field_poisson_ratio output of the operator
+
+ This field contains the Poisson's ratio of beams.
Returns
- ----------
- my_field_poisson_ratio : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_poisson_ratio = op.outputs.field_poisson_ratio()
- """ # noqa: E501
+ """
return self._field_poisson_ratio
@property
- def field_shear_modulus(self):
- """Allows to get field_shear_modulus output of the operator
+ def field_shear_modulus(self) -> Output:
+ r"""Allows to get field_shear_modulus output of the operator
+
+ This field contains the Shear Modulus of beams.
Returns
- ----------
- my_field_shear_modulus : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_shear_modulus = op.outputs.field_shear_modulus()
- """ # noqa: E501
+ """
return self._field_shear_modulus
@property
- def field_beam_length(self):
- """Allows to get field_beam_length output of the operator
+ def field_beam_length(self) -> Output:
+ r"""Allows to get field_beam_length output of the operator
+
+ This field contains the length of beams.
Returns
- ----------
- my_field_beam_length : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_beam_length = op.outputs.field_beam_length()
- """ # noqa: E501
+ """
return self._field_beam_length
@property
- def field_torsion_constant(self):
- """Allows to get field_torsion_constant output of the operator
+ def field_torsion_constant(self) -> Output:
+ r"""Allows to get field_torsion_constant output of the operator
+
+ This field contains the Torsion Constant of beams.
Returns
- ----------
- my_field_torsion_constant : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_torsion_constant = op.outputs.field_torsion_constant()
- """ # noqa: E501
+ """
return self._field_torsion_constant
@property
- def field_warping_constant(self):
- """Allows to get field_warping_constant output of the operator
+ def field_warping_constant(self) -> Output:
+ r"""Allows to get field_warping_constant output of the operator
+
+ This field contains the Warping Constant of beams.
Returns
- ----------
- my_field_warping_constant : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_warping_constant = op.outputs.field_warping_constant()
- """ # noqa: E501
+ """
return self._field_warping_constant
@property
- def field_offset_type(self):
- """Allows to get field_offset_type output of the operator
+ def field_offset_type(self) -> Output:
+ r"""Allows to get field_offset_type output of the operator
+
+ This field contains offset type of beams.
Returns
- ----------
- my_field_offset_type : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_offset_type = op.outputs.field_offset_type()
- """ # noqa: E501
+ """
return self._field_offset_type
@property
- def field_offset_y(self):
- """Allows to get field_offset_y output of the operator
+ def field_offset_y(self) -> Output:
+ r"""Allows to get field_offset_y output of the operator
+
+ This field contains offset y of beams.
Returns
- ----------
- my_field_offset_y : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_offset_y = op.outputs.field_offset_y()
- """ # noqa: E501
+ """
return self._field_offset_y
@property
- def field_offset_z(self):
- """Allows to get field_offset_z output of the operator
+ def field_offset_z(self) -> Output:
+ r"""Allows to get field_offset_z output of the operator
+
+ This field contains offset z of beams.
Returns
- ----------
- my_field_offset_z : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.beam_properties()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_offset_z = op.outputs.field_offset_z()
- """ # noqa: E501
+ """
return self._field_offset_z
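A short sketch of the beam_properties pins documented above. The result file path is hypothetical; a result file that actually contains beam elements is needed for the outputs to be populated.

from ansys.dpf import core as dpf

data_sources = dpf.DataSources("model_with_beams.rst")  # hypothetical result file with beams
op = dpf.operators.mesh.beam_properties(data_sources=data_sources)
# Each documented output pin is evaluated on request.
mesh_out = op.outputs.mesh_out()
section_ids = op.outputs.field_type_section_id()
areas = op.outputs.field_area()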
diff --git a/src/ansys/dpf/core/operators/mesh/change_cs.py b/src/ansys/dpf/core/operators/mesh/change_cs.py
index bf5669e8a4f..763dd32b2a0 100644
--- a/src/ansys/dpf/core/operators/mesh/change_cs.py
+++ b/src/ansys/dpf/core/operators/mesh/change_cs.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class change_cs(Operator):
- """Applies a transformation (rotation and displacement) matrix on a mesh
- or meshes container.
+ r"""Applies a transformation (rotation and displacement) matrix on a mesh or
+ meshes container.
+
Parameters
----------
- meshes : MeshedRegion or MeshesContainer
- coordinate_system : Field
- 3-3 rotation matrix + 3 translations (x, y,
- z)
+ meshes: MeshedRegion or MeshesContainer
+ coordinate_system: Field
+ 3-3 rotation matrix + 3 translations (X, Y, Z)
Returns
-------
- meshed_region : MeshedRegion or MeshesContainer
+ meshed_region: MeshedRegion or MeshesContainer
Examples
--------
@@ -60,9 +64,10 @@ def __init__(self, meshes=None, coordinate_system=None, config=None, server=None
self.inputs.coordinate_system.connect(coordinate_system)
@staticmethod
- def _spec():
- description = """Applies a transformation (rotation and displacement) matrix on a mesh
- or meshes container."""
+ def _spec() -> Specification:
+ description = r"""Applies a transformation (rotation and displacement) matrix on a mesh or
+meshes container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +75,13 @@ def _spec():
name="meshes",
type_names=["meshed_region", "meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="coordinate_system",
type_names=["field"],
optional=False,
- document="""3-3 rotation matrix + 3 translations (x, y,
- z)""",
+ document=r"""3-3 rotation matrix + 3 translations (X, Y, Z)""",
),
},
map_output_pin_spec={
@@ -85,14 +89,14 @@ def _spec():
name="meshed_region",
type_names=["meshed_region", "meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh::change_cs", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsChangeCs:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsChangeCs
+ inputs:
+ An instance of InputsChangeCs.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsChangeCs:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsChangeCs
+ outputs:
+ An instance of OutputsChangeCs.
"""
return super().outputs
@@ -150,12 +161,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._coordinate_system)
@property
- def meshes(self):
- """Allows to connect meshes input to the operator.
+ def meshes(self) -> Input:
+ r"""Allows to connect meshes input to the operator.
- Parameters
- ----------
- my_meshes : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -168,15 +180,15 @@ def meshes(self):
return self._meshes
@property
- def coordinate_system(self):
- """Allows to connect coordinate_system input to the operator.
+ def coordinate_system(self) -> Input:
+ r"""Allows to connect coordinate_system input to the operator.
- 3-3 rotation matrix + 3 translations (x, y,
- z)
+ 3-3 rotation matrix + 3 translations (X, Y, Z)
- Parameters
- ----------
- my_coordinate_system : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
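A hedged sketch of the change_cs pins. The coordinate_system pin is documented as a Field holding a 3x3 rotation matrix plus 3 translations; how those 12 values must be laid out inside the Field is an assumption here (a flat array passed through fields_factory.field_from_array), so verify against the operator specification on a live server before relying on it. The bundled static example file is used only to obtain a meshed region.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples, fields_factory

mesh = dpf.Model(examples.find_static_rst()).metadata.meshed_region
# Identity rotation and zero translation, flattened row by row (assumed layout).
cs = fields_factory.field_from_array(
    [1.0, 0.0, 0.0,
     0.0, 1.0, 0.0,
     0.0, 0.0, 1.0,
     0.0, 0.0, 0.0]
)
op = dpf.operators.mesh.change_cs(meshes=mesh, coordinate_system=cs)
transformed = op.outputs.meshed_region()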
diff --git a/src/ansys/dpf/core/operators/mesh/combine_levelset.py b/src/ansys/dpf/core/operators/mesh/combine_levelset.py
index f14417d72ab..f6bbc775a5b 100644
--- a/src/ansys/dpf/core/operators/mesh/combine_levelset.py
+++ b/src/ansys/dpf/core/operators/mesh/combine_levelset.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class combine_levelset(Operator):
- """Takes two level sets and computes their binary union.
+ r"""Takes two level sets and computes their binary union.
+
Parameters
----------
- fieldA : Field
- fieldB : Field
+ fieldA: Field
+ fieldB: Field
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = """Takes two level sets and computes their binary union."""
+ def _spec() -> Specification:
+ description = r"""Takes two level sets and computes their binary union.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fieldB",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="levelset::combine", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCombineLevelset:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCombineLevelset
+ inputs:
+ An instance of InputsCombineLevelset.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCombineLevelset:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCombineLevelset
+ outputs:
+ An instance of OutputsCombineLevelset.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Parameters
- ----------
- my_fieldB : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.combine_levelset()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
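A minimal sketch of the combine_levelset pins. The two level-set fields (level_set_a, level_set_b) are assumed to exist, for example signed-distance fields produced by an upstream level-set operator; nothing here constructs them.

from ansys.dpf import core as dpf

op = dpf.operators.mesh.combine_levelset()
op.inputs.fieldA.connect(level_set_a)  # assumed level-set field
op.inputs.fieldB.connect(level_set_b)  # assumed level-set field
union_field = op.outputs.field()       # binary union of the two level sets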
diff --git a/src/ansys/dpf/core/operators/mesh/decimate_mesh.py b/src/ansys/dpf/core/operators/mesh/decimate_mesh.py
index c294fee3a1b..5026c3541ef 100644
--- a/src/ansys/dpf/core/operators/mesh/decimate_mesh.py
+++ b/src/ansys/dpf/core/operators/mesh/decimate_mesh.py
@@ -4,35 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class decimate_mesh(Operator):
- """Decimate a meshed region
+ r"""Decimate a meshed region
+
Parameters
----------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Mesh to decimate
- preservation_ratio : float, optional
- Target ratio of elements to preserve, the
- actual number of elements preserved
- might differ. default value is 0.5.
- aggressiveness : int, optional
- Quality measure for the resulting decimated
- mesh. lower aggresiveness will
- provide a higher quality mesh with
- the tradeoff of higher execution
- time. value range is 0 to 150,
- default is 0.
+ preservation_ratio: float, optional
+ Target ratio of elements to preserve, the actual number of elements preserved might differ. Default value is 0.5.
+ aggressiveness: int, optional
+ Quality measure for the resulting decimated mesh. Lower aggressiveness will provide a higher quality mesh with the tradeoff of higher execution time. Value range is 0 to 150, default is 0.
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Decimated mesh with triangle elements
Examples
@@ -80,8 +78,9 @@ def __init__(
self.inputs.aggressiveness.connect(aggressiveness)
@staticmethod
- def _spec():
- description = """Decimate a meshed region"""
+ def _spec() -> Specification:
+ description = r"""Decimate a meshed region
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,26 +88,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Mesh to decimate""",
+ document=r"""Mesh to decimate""",
),
1: PinSpecification(
name="preservation_ratio",
type_names=["double"],
optional=True,
- document="""Target ratio of elements to preserve, the
- actual number of elements preserved
- might differ. default value is 0.5.""",
+ document=r"""Target ratio of elements to preserve, the actual number of elements preserved might differ. Default value is 0.5.""",
),
2: PinSpecification(
name="aggressiveness",
type_names=["int32"],
optional=True,
- document="""Quality measure for the resulting decimated
- mesh. lower aggresiveness will
- provide a higher quality mesh with
- the tradeoff of higher execution
- time. value range is 0 to 150,
- default is 0.""",
+ document=r"""Quality measure for the resulting decimated mesh. Lower aggresiveness will provide a higher quality mesh with the tradeoff of higher execution time. Value range is 0 to 150, default is 0.""",
),
},
map_output_pin_spec={
@@ -116,14 +108,14 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Decimated mesh with triangle elements""",
+ document=r"""Decimated mesh with triangle elements""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +124,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="decimate_mesh", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDecimateMesh:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDecimateMesh
+ inputs:
+ An instance of InputsDecimateMesh.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDecimateMesh:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDecimateMesh
+ outputs:
+ An instance of OutputsDecimateMesh.
"""
return super().outputs
@@ -185,14 +184,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._aggressiveness)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
Mesh to decimate
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,16 +205,15 @@ def mesh(self):
return self._mesh
@property
- def preservation_ratio(self):
- """Allows to connect preservation_ratio input to the operator.
+ def preservation_ratio(self) -> Input:
+ r"""Allows to connect preservation_ratio input to the operator.
- Target ratio of elements to preserve, the
- actual number of elements preserved
- might differ. default value is 0.5.
+ Target ratio of elements to preserve, the actual number of elements preserved might differ. Default value is 0.5.
- Parameters
- ----------
- my_preservation_ratio : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,19 +226,15 @@ def preservation_ratio(self):
return self._preservation_ratio
@property
- def aggressiveness(self):
- """Allows to connect aggressiveness input to the operator.
+ def aggressiveness(self) -> Input:
+ r"""Allows to connect aggressiveness input to the operator.
- Quality measure for the resulting decimated
- mesh. lower aggresiveness will
- provide a higher quality mesh with
- the tradeoff of higher execution
- time. value range is 0 to 150,
- default is 0.
+ Quality measure for the resulting decimated mesh. Lower aggressiveness will provide a higher quality mesh with the tradeoff of higher execution time. Value range is 0 to 150, default is 0.
- Parameters
- ----------
- my_aggressiveness : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,18 +265,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
+
+ Decimated mesh with triangle elements
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.decimate_mesh()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
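A sketch of the decimate_mesh pins using the bundled static example file. The preservation_ratio and aggressiveness values are arbitrary illustrations, and whether this particular solid mesh decimates cleanly is not guaranteed; since the operator returns a triangle mesh, a surface or skin mesh is the more natural input.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

mesh = dpf.Model(examples.find_static_rst()).metadata.meshed_region
op = dpf.operators.mesh.decimate_mesh(
    mesh=mesh,
    preservation_ratio=0.5,  # keep roughly half of the elements (documented default)
    aggressiveness=0,        # favor quality over speed; documented range is 0 to 150
)
decimated = op.outputs.mesh()  # decimated mesh with triangle elements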
diff --git a/src/ansys/dpf/core/operators/mesh/exclude_levelset.py b/src/ansys/dpf/core/operators/mesh/exclude_levelset.py
index 8346bf00216..68f9733f0ce 100644
--- a/src/ansys/dpf/core/operators/mesh/exclude_levelset.py
+++ b/src/ansys/dpf/core/operators/mesh/exclude_levelset.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class exclude_levelset(Operator):
- """Takes two level sets and excludes the second one from the first.
+ r"""Takes two level sets and excludes the second one from the first.
+
Parameters
----------
- fieldA : Field
- fieldB : Field
+ fieldA: Field
+ fieldB: Field
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -56,10 +61,9 @@ def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
self.inputs.fieldB.connect(fieldB)
@staticmethod
- def _spec():
- description = (
- """Takes two level sets and excludes the second one from the first."""
- )
+ def _spec() -> Specification:
+ description = r"""Takes two level sets and excludes the second one from the first.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -67,13 +71,13 @@ def _spec():
name="fieldA",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fieldB",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -81,14 +85,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -97,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="levelset::exclude", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsExcludeLevelset:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExcludeLevelset
+ inputs:
+ An instance of InputsExcludeLevelset.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExcludeLevelset:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExcludeLevelset
+ outputs:
+ An instance of OutputsExcludeLevelset.
"""
return super().outputs
@@ -146,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fieldB)
@property
- def fieldA(self):
- """Allows to connect fieldA input to the operator.
+ def fieldA(self) -> Input:
+ r"""Allows to connect fieldA input to the operator.
- Parameters
- ----------
- my_fieldA : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,12 +176,13 @@ def fieldA(self):
return self._fieldA
@property
- def fieldB(self):
- """Allows to connect fieldB input to the operator.
+ def fieldB(self) -> Input:
+ r"""Allows to connect fieldB input to the operator.
- Parameters
- ----------
- my_fieldB : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.exclude_levelset()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
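A sketch showing how the exclude_levelset pins compose with the combine_levelset operator above: an operator output pin can be connected directly to an input pin. The three level-set fields are assumed to exist and are not constructed here.

from ansys.dpf import core as dpf

union_op = dpf.operators.mesh.combine_levelset(fieldA=level_set_a, fieldB=level_set_b)
op = dpf.operators.mesh.exclude_levelset()
op.inputs.fieldA.connect(union_op.outputs.field)  # pipe an upstream output pin as input
op.inputs.fieldB.connect(level_set_c)             # level set to exclude, assumed to exist
remainder = op.outputs.field()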
diff --git a/src/ansys/dpf/core/operators/mesh/external_layer.py b/src/ansys/dpf/core/operators/mesh/external_layer.py
index 2319075f72f..1bb5035d454 100644
--- a/src/ansys/dpf/core/operators/mesh/external_layer.py
+++ b/src/ansys/dpf/core/operators/mesh/external_layer.py
@@ -4,26 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class external_layer(Operator):
- """Extracts the external layer (thick skin) of the mesh (3D elements) in
- a new meshed region.
+ r"""Extracts the external layer (thick skin) of the mesh (3D elements) in a
+ new meshed region.
+
Parameters
----------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Returns
-------
- mesh : MeshedRegion
- nodes_mesh_scoping : Scoping
- elements_mesh_scoping : Scoping
+ mesh: MeshedRegion
+ nodes_mesh_scoping: Scoping
+ elements_mesh_scoping: Scoping
Examples
--------
@@ -57,9 +62,10 @@ def __init__(self, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extracts the external layer (thick skin) of the mesh (3D elements) in
- a new meshed region."""
+ def _spec() -> Specification:
+ description = r"""Extracts the external layer (thick skin) of the mesh (3D elements) in a
+new meshed region.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -67,7 +73,7 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -75,26 +81,26 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="nodes_mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="elements_mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -103,31 +109,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="meshed_external_layer_sector", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsExternalLayer:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsExternalLayer
+ inputs:
+ An instance of InputsExternalLayer.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsExternalLayer:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsExternalLayer
+ outputs:
+ An instance of OutputsExternalLayer.
"""
return super().outputs
@@ -150,12 +163,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -194,52 +208,55 @@ def __init__(self, op: Operator):
self._outputs.append(self._elements_mesh_scoping)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.external_layer()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
@property
- def nodes_mesh_scoping(self):
- """Allows to get nodes_mesh_scoping output of the operator
+ def nodes_mesh_scoping(self) -> Output:
+ r"""Allows to get nodes_mesh_scoping output of the operator
Returns
- ----------
- my_nodes_mesh_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.external_layer()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping()
- """ # noqa: E501
+ """
return self._nodes_mesh_scoping
@property
- def elements_mesh_scoping(self):
- """Allows to get elements_mesh_scoping output of the operator
+ def elements_mesh_scoping(self) -> Output:
+ r"""Allows to get elements_mesh_scoping output of the operator
Returns
- ----------
- my_elements_mesh_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.external_layer()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_elements_mesh_scoping = op.outputs.elements_mesh_scoping()
- """ # noqa: E501
+ """
return self._elements_mesh_scoping
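A short sketch of the three external_layer output pins documented above, run on the bundled static example model.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

mesh = dpf.Model(examples.find_static_rst()).metadata.meshed_region
op = dpf.operators.mesh.external_layer(mesh=mesh)
skin_mesh = op.outputs.mesh()                         # external layer as a new meshed region
node_scoping = op.outputs.nodes_mesh_scoping()        # nodes belonging to that layer
element_scoping = op.outputs.elements_mesh_scoping()  # elements belonging to that layer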
diff --git a/src/ansys/dpf/core/operators/mesh/from_field.py b/src/ansys/dpf/core/operators/mesh/from_field.py
index ff511b0a03d..d17fdb4b9d0 100644
--- a/src/ansys/dpf/core/operators/mesh/from_field.py
+++ b/src/ansys/dpf/core/operators/mesh/from_field.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class from_field(Operator):
- """Returns the meshed region contained in the support of the mesh.
+ r"""Returns the meshed region contained in the support of the mesh.
+
Parameters
----------
- field : Field
+ field: Field
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Examples
--------
@@ -50,10 +55,9 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = (
- """Returns the meshed region contained in the support of the mesh."""
- )
+ def _spec() -> Specification:
+ description = r"""Returns the meshed region contained in the support of the mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -61,7 +65,7 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -69,14 +73,14 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -85,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GetSupportFromField", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFromField:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFromField
+ inputs:
+ An instance of InputsFromField.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFromField:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFromField
+ outputs:
+ An instance of OutputsFromField.
"""
return super().outputs
@@ -130,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -166,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.from_field()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
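A short sketch of the from_field pins: a displacement field from the bundled static example serves as the field whose support carries the meshed region.

from ansys.dpf import core as dpf
from ansys.dpf.core import examples

model = dpf.Model(examples.find_static_rst())
disp_field = model.results.displacement().outputs.fields_container()[0]
op = dpf.operators.mesh.from_field(field=disp_field)
mesh = op.outputs.mesh()  # meshed region recovered from the field support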
diff --git a/src/ansys/dpf/core/operators/mesh/from_scoping.py b/src/ansys/dpf/core/operators/mesh/from_scoping.py
index bd9a2fb5d6a..6c1d5c84d4b 100644
--- a/src/ansys/dpf/core/operators/mesh/from_scoping.py
+++ b/src/ansys/dpf/core/operators/mesh/from_scoping.py
@@ -4,44 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class from_scoping(Operator):
- """Extracts a meshed region from another meshed region based on a
- scoping. Regarding the property fields of the meshed region: the
- 'Elemental', 'Face', and 'Nodal' property fields are scoped to the
- elements, faces or nodes of the output mesh, the 'Global' property
- fields are transferred from the input mesh to the output mesh
- without changes, and the rest of the property fields are not
- present in the output mesh.
+ r"""Extracts a meshed region from another meshed region based on a scoping.
+ Regarding the property fields of the meshed region: the ‘Elemental’,
+ ‘Face’, and ‘Nodal’ property fields are scoped to the elements, faces or
+ nodes of the output mesh, the ‘Global’ property fields are transferred
+ from the input mesh to the output mesh without changes, and the rest of
+ the property fields are not present in the output mesh.
+
Parameters
----------
- scoping : Scoping
- If nodal/face scoping, then the scoping is
- transposed respecting the inclusive
- pin
- inclusive : int, optional
- If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included
- nodes_only : bool, optional
- Returns mesh with nodes only (without any
- elements or property fields). default
- is false.
- mesh : MeshedRegion
+ scoping: Scoping
+ if nodal/face scoping, then the scoping is transposed respecting the inclusive pin
+ inclusive: int, optional
+ if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included
+ nodes_only: bool, optional
+ returns mesh with nodes only (without any elements or property fields). Default is false.
+ mesh: MeshedRegion
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Examples
--------
@@ -94,15 +89,14 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extracts a meshed region from another meshed region based on a
- scoping. Regarding the property fields of the meshed
- region: the 'Elemental', 'Face', and 'Nodal' property
- fields are scoped to the elements, faces or nodes of the
- output mesh, the 'Global' property fields are transferred
- from the input mesh to the output mesh without changes,
- and the rest of the property fields are not present in the
- output mesh."""
+ def _spec() -> Specification:
+ description = r"""Extracts a meshed region from another meshed region based on a scoping.
+Regarding the property fields of the meshed region: the ‘Elemental’,
+‘Face’, and ‘Nodal’ property fields are scoped to the elements, faces or
+nodes of the output mesh, the ‘Global’ property fields are transferred
+from the input mesh to the output mesh without changes, and the rest of
+the property fields are not present in the output mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,34 +104,25 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=False,
- document="""If nodal/face scoping, then the scoping is
- transposed respecting the inclusive
- pin""",
+ document=r"""if nodal/face scoping, then the scoping is transposed respecting the inclusive pin""",
),
2: PinSpecification(
name="inclusive",
type_names=["int32"],
optional=True,
- document="""If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included""",
+ document=r"""if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included""",
),
3: PinSpecification(
name="nodes_only",
type_names=["bool"],
optional=True,
- document="""Returns mesh with nodes only (without any
- elements or property fields). default
- is false.""",
+ document=r"""returns mesh with nodes only (without any elements or property fields). Default is false.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -145,14 +130,14 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -161,29 +146,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh::by_scoping", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFromScoping:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFromScoping
+ inputs:
+ An instance of InputsFromScoping.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFromScoping:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFromScoping
+ outputs:
+ An instance of OutputsFromScoping.
"""
return super().outputs
@@ -218,16 +210,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- If nodal/face scoping, then the scoping is
- transposed respecting the inclusive
- pin
+ if nodal/face scoping, then the scoping is transposed respecting the inclusive pin
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -240,19 +231,15 @@ def scoping(self):
return self._scoping
@property
- def inclusive(self):
- """Allows to connect inclusive input to the operator.
+ def inclusive(self) -> Input:
+ r"""Allows to connect inclusive input to the operator.
- If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included
+ if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included
- Parameters
- ----------
- my_inclusive : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,16 +252,15 @@ def inclusive(self):
return self._inclusive
@property
- def nodes_only(self):
- """Allows to connect nodes_only input to the operator.
+ def nodes_only(self) -> Input:
+ r"""Allows to connect nodes_only input to the operator.
- Returns mesh with nodes only (without any
- elements or property fields). default
- is false.
+ returns mesh with nodes only (without any elements or property fields). Default is false.
- Parameters
- ----------
- my_nodes_only : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -287,12 +273,13 @@ def nodes_only(self):
return self._nodes_only
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,18 +310,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.from_scoping()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
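A minimal usage sketch for the mesh::by_scoping operator documented above; the result-file path, node IDs, and inclusive value are placeholders rather than values taken from this change:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> whole_mesh = model.metadata.meshed_region
>>> # nodal scoping; with inclusive=1 every element touching these nodes is kept
>>> scoping = dpf.Scoping(ids=[1, 2, 3, 4], location=dpf.locations.nodal)
>>> op = dpf.operators.mesh.from_scoping(scoping=scoping, inclusive=1, mesh=whole_mesh)
>>> sub_mesh = op.outputs.mesh()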
diff --git a/src/ansys/dpf/core/operators/mesh/from_scopings.py b/src/ansys/dpf/core/operators/mesh/from_scopings.py
index 3bb0b048d4e..11dca7d565c 100644
--- a/src/ansys/dpf/core/operators/mesh/from_scopings.py
+++ b/src/ansys/dpf/core/operators/mesh/from_scopings.py
@@ -4,38 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class from_scopings(Operator):
- """Extracts multiple meshed region base on a scoping and saved in a
+ r"""Extracts multiple meshed regions based on a scoping and saves them in a
MeshesContainer
+
Parameters
----------
- scopings_container : ScopingsContainer
- If nodal scoping, then the scoping is
- transposed respecting the inclusive
- pin
- inclusive : int, optional
- If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included
- nodes_only : bool, optional
- Returns mesh with nodes only (without any
- elements). default is false.
- mesh : MeshedRegion
+ scopings_container: ScopingsContainer
+ if nodal scoping, then the scoping is transposed respecting the inclusive pin
+ inclusive: int, optional
+ if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included
+ nodes_only: bool, optional
+ returns mesh with nodes only (without any elements). Default is false.
+ mesh: MeshedRegion
Returns
-------
- meshes : MeshesContainer
+ meshes: MeshesContainer
Examples
--------
@@ -88,9 +85,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extracts multiple meshed region base on a scoping and saved in a
- MeshesContainer"""
+ def _spec() -> Specification:
+ description = r"""Extracts multiple meshed regions based on a scoping and saves them in a
+MeshesContainer
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -98,33 +96,25 @@ def _spec():
name="scopings_container",
type_names=["scopings_container"],
optional=False,
- document="""If nodal scoping, then the scoping is
- transposed respecting the inclusive
- pin""",
+ document=r"""if nodal scoping, then the scoping is transposed respecting the inclusive pin""",
),
2: PinSpecification(
name="inclusive",
type_names=["int32"],
optional=True,
- document="""If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included""",
+ document=r"""if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included""",
),
3: PinSpecification(
name="nodes_only",
type_names=["bool"],
optional=True,
- document="""Returns mesh with nodes only (without any
- elements). default is false.""",
+ document=r"""returns mesh with nodes only (without any elements). Default is false.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -132,14 +122,14 @@ def _spec():
name="meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -148,29 +138,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="meshes::by_scopings", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsFromScopings:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsFromScopings
+ inputs:
+ An instance of InputsFromScopings.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsFromScopings:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsFromScopings
+ outputs:
+ An instance of OutputsFromScopings.
"""
return super().outputs
@@ -205,16 +202,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def scopings_container(self):
- """Allows to connect scopings_container input to the operator.
+ def scopings_container(self) -> Input:
+ r"""Allows to connect scopings_container input to the operator.
- If nodal scoping, then the scoping is
- transposed respecting the inclusive
- pin
+ if nodal scoping, then the scoping is transposed respecting the inclusive pin
- Parameters
- ----------
- my_scopings_container : ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,19 +223,15 @@ def scopings_container(self):
return self._scopings_container
@property
- def inclusive(self):
- """Allows to connect inclusive input to the operator.
+ def inclusive(self) -> Input:
+ r"""Allows to connect inclusive input to the operator.
- If inclusive == 1 then all the elements/faces
- adjacent to the nodes/faces ids in
- input are added, if inclusive == 0,
- only the elements/faces which have
- all their nodes/faces in the scoping
- are included
+ if inclusive == 1 then all the elements/faces adjacent to the nodes/faces ids in input are added, if inclusive == 0, only the elements/faces which have all their nodes/faces in the scoping are included
- Parameters
- ----------
- my_inclusive : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,15 +244,15 @@ def inclusive(self):
return self._inclusive
@property
- def nodes_only(self):
- """Allows to connect nodes_only input to the operator.
+ def nodes_only(self) -> Input:
+ r"""Allows to connect nodes_only input to the operator.
- Returns mesh with nodes only (without any
- elements). default is false.
+ returns mesh with nodes only (without any elements). Default is false.
- Parameters
- ----------
- my_nodes_only : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -273,12 +265,13 @@ def nodes_only(self):
return self._nodes_only
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -309,18 +302,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._meshes)
@property
- def meshes(self):
- """Allows to get meshes output of the operator
+ def meshes(self) -> Output:
+ r"""Allows to get meshes output of the operator
Returns
- ----------
- my_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.from_scopings()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshes = op.outputs.meshes()
- """ # noqa: E501
+ """
return self._meshes
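A comparable sketch for meshes::by_scopings, producing one sub-mesh per scoping in the container; the label name and element IDs are illustrative assumptions:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> scopings = dpf.ScopingsContainer()
>>> scopings.add_label("body")
>>> scopings.add_scoping({"body": 1}, dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.elemental))
>>> scopings.add_scoping({"body": 2}, dpf.Scoping(ids=[4, 5, 6], location=dpf.locations.elemental))
>>> op = dpf.operators.mesh.from_scopings(scopings_container=scopings, mesh=model.metadata.meshed_region)
>>> meshes = op.outputs.meshes()  # one meshed region per input scoping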
diff --git a/src/ansys/dpf/core/operators/mesh/iso_surfaces.py b/src/ansys/dpf/core/operators/mesh/iso_surfaces.py
index 809a71013b3..aec39508421 100644
--- a/src/ansys/dpf/core/operators/mesh/iso_surfaces.py
+++ b/src/ansys/dpf/core/operators/mesh/iso_surfaces.py
@@ -4,52 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class iso_surfaces(Operator):
- """Extract multiple iso-contours from mesh_cut operator and set it into a
- meshes container. If pin 1 is provided, 'num_surfaces' iso-
- contours will be computed, ranging from 'min_value' to 'max_value'
- linearly. If pin 4 is provided, the iso-values are the one set by
- the user. The iso-values are stored into a FieldsContainer.
+ r"""Extracts multiple iso-contours from the mesh_cut operator and sets them into a
+ meshes container. If pin 1 is provided, “num_surfaces” iso-contours will
+ be computed, ranging from “min_value” to “max_value” linearly. If pin 4
+ is provided, the iso-values are the ones set by the user. The iso-values
+ are stored into a FieldsContainer.
+
Parameters
----------
- field : Field
- Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.
- num_surfaces : int, optional
- If provided, iso_values are linearly computed
- between the min and the max of the
- field of results. if not, iso_values
- must be provided by the user through
- pin 4
- mesh : MeshedRegion, optional
- Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.
- slice_surfaces : bool
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
- vector_iso_values : optional
- If provided, user defined iso_values to
- compute. if not provided, iso_values
- are linearly compute between the min
- and the max of the field of results.
+ field: Field
+ Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.
+ num_surfaces: int, optional
+ If provided, iso_values are linearly computed between the min and the max of the field of results. If not, iso_values must be provided by the user through pin 4
+ mesh: MeshedRegion, optional
+ Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.
+ slice_surfaces: bool
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
+ vector_iso_values: optional
+ If provided, user-defined iso_values to compute. If not provided, iso_values are linearly computed between the min and the max of the field of results.
Returns
-------
- meshes : MeshesContainer
- fields_container : FieldsContainer
+ meshes: MeshesContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -109,13 +99,13 @@ def __init__(
self.inputs.vector_iso_values.connect(vector_iso_values)
@staticmethod
- def _spec():
- description = """Extract multiple iso-contours from mesh_cut operator and set it into a
- meshes container. If pin 1 is provided, "num_surfaces"
- iso-contours will be computed, ranging from "min_value" to
- "max_value" linearly. If pin 4 is provided, the iso-values
- are the one set by the user. The iso-values are stored
- into a FieldsContainer."""
+ def _spec() -> Specification:
+ description = r"""Extracts multiple iso-contours from the mesh_cut operator and sets them into a
+meshes container. If pin 1 is provided, “num_surfaces” iso-contours will
+be computed, ranging from “min_value” to “max_value” linearly. If pin 4
+is provided, the iso-values are the ones set by the user. The iso-values
+are stored into a FieldsContainer.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -123,46 +113,31 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.""",
+ document=r"""Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.""",
),
1: PinSpecification(
name="num_surfaces",
type_names=["int32"],
optional=True,
- document="""If provided, iso_values are linearly computed
- between the min and the max of the
- field of results. if not, iso_values
- must be provided by the user through
- pin 4""",
+ document=r"""If provided, iso_values are linearly computed between the min and the max of the field of results. If not, iso_values must be provided by the user through pin 4""",
),
2: PinSpecification(
name="mesh",
type_names=["meshed_region"],
optional=True,
- document="""Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.""",
+ document=r"""Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.""",
),
3: PinSpecification(
name="slice_surfaces",
type_names=["bool"],
optional=False,
- document="""True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.""",
+ document=r"""True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.""",
),
4: PinSpecification(
name="vector_iso_values",
type_names=["vector"],
optional=True,
- document="""If provided, user defined iso_values to
- compute. if not provided, iso_values
- are linearly compute between the min
- and the max of the field of results.""",
+ document=r"""If provided, user-defined iso_values to compute. If not provided, iso_values are linearly computed between the min and the max of the field of results.""",
),
},
map_output_pin_spec={
@@ -170,20 +145,20 @@ def _spec():
name="meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -192,29 +167,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="iso_surfaces", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIsoSurfaces:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIsoSurfaces
+ inputs:
+ An instance of InputsIsoSurfaces.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIsoSurfaces:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIsoSurfaces
+ outputs:
+ An instance of OutputsIsoSurfaces.
"""
return super().outputs
@@ -253,17 +235,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._vector_iso_values)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.
+ Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -276,18 +256,15 @@ def field(self):
return self._field
@property
- def num_surfaces(self):
- """Allows to connect num_surfaces input to the operator.
+ def num_surfaces(self) -> Input:
+ r"""Allows to connect num_surfaces input to the operator.
- If provided, iso_values are linearly computed
- between the min and the max of the
- field of results. if not, iso_values
- must be provided by the user through
- pin 4
+ If provided, iso_values are linearly computed between the min and the max of the field of results. If not, iso_values must be provided by the user through pin 4
- Parameters
- ----------
- my_num_surfaces : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,16 +277,15 @@ def num_surfaces(self):
return self._num_surfaces
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.
+ Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -322,17 +298,15 @@ def mesh(self):
return self._mesh
@property
- def slice_surfaces(self):
- """Allows to connect slice_surfaces input to the operator.
+ def slice_surfaces(self) -> Input:
+ r"""Allows to connect slice_surfaces input to the operator.
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
- Parameters
- ----------
- my_slice_surfaces : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -345,17 +319,15 @@ def slice_surfaces(self):
return self._slice_surfaces
@property
- def vector_iso_values(self):
- """Allows to connect vector_iso_values input to the operator.
+ def vector_iso_values(self) -> Input:
+ r"""Allows to connect vector_iso_values input to the operator.
- If provided, user defined iso_values to
- compute. if not provided, iso_values
- are linearly compute between the min
- and the max of the field of results.
+ If provided, user-defined iso_values to compute. If not provided, iso_values are linearly computed between the min and the max of the field of results.
- Parameters
- ----------
- my_vector_iso_values :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -389,35 +361,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def meshes(self):
- """Allows to get meshes output of the operator
+ def meshes(self) -> Output:
+ r"""Allows to get meshes output of the operator
Returns
- ----------
- my_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.iso_surfaces()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshes = op.outputs.meshes()
- """ # noqa: E501
+ """
return self._meshes
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.iso_surfaces()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
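A sketch of the iso_surfaces wiring with evenly spaced iso-values; using the displacement norm as the scalar field and requesting five surfaces are assumptions made for illustration:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> disp = model.results.displacement().outputs.fields_container()
>>> scalar_field = dpf.operators.math.norm(field=disp[0]).outputs.field()
>>> op = dpf.operators.mesh.iso_surfaces(field=scalar_field, num_surfaces=5, mesh=model.metadata.meshed_region, slice_surfaces=True)
>>> surface_meshes = op.outputs.meshes()        # one surface mesh per iso-value
>>> iso_values = op.outputs.fields_container()  # the iso-values that were used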
diff --git a/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py b/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py
index 0f51475b98d..57743123138 100644
--- a/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py
+++ b/src/ansys/dpf/core/operators/mesh/make_plane_levelset.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class make_plane_levelset(Operator):
- """Computes the level set for a plane using coordinates.
+ r"""Computes the level set for a plane using coordinates.
+
Parameters
----------
- coordinates : MeshedRegion or Field
- normal : Field
- An overall 3d vector that gives the normal
- direction of the plane.
- origin : Field
- An overall 3d vector that gives a point of
- the plane.
+ coordinates: MeshedRegion or Field
+ normal: Field
+ An overall 3D vector that gives the normal direction of the plane.
+ origin: Field
+ An overall 3d vector that gives a point of the plane.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -68,8 +71,9 @@ def __init__(
self.inputs.origin.connect(origin)
@staticmethod
- def _spec():
- description = """Computes the level set for a plane using coordinates."""
+ def _spec() -> Specification:
+ description = r"""Computes the level set for a plane using coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -77,21 +81,19 @@ def _spec():
name="coordinates",
type_names=["abstract_meshed_region", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="normal",
type_names=["field"],
optional=False,
- document="""An overall 3d vector that gives the normal
- direction of the plane.""",
+ document=r"""An overall 3D vector that gives the normal direction of the plane.""",
),
2: PinSpecification(
name="origin",
type_names=["field"],
optional=False,
- document="""An overall 3d vector that gives a point of
- the plane.""",
+ document=r"""An overall 3d vector that gives a point of the plane.""",
),
},
map_output_pin_spec={
@@ -99,14 +101,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -115,29 +117,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="levelset::make_plane", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMakePlaneLevelset:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMakePlaneLevelset
+ inputs:
+ An instance of InputsMakePlaneLevelset.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMakePlaneLevelset:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMakePlaneLevelset
+ outputs:
+ An instance of OutputsMakePlaneLevelset.
"""
return super().outputs
@@ -168,12 +177,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._origin)
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Parameters
- ----------
- my_coordinates : MeshedRegion or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -186,15 +196,15 @@ def coordinates(self):
return self._coordinates
@property
- def normal(self):
- """Allows to connect normal input to the operator.
+ def normal(self) -> Input:
+ r"""Allows to connect normal input to the operator.
- An overall 3d vector that gives the normal
- direction of the plane.
+ An overall 3D vector that gives the normal direction of the plane.
- Parameters
- ----------
- my_normal : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -207,15 +217,15 @@ def normal(self):
return self._normal
@property
- def origin(self):
- """Allows to connect origin input to the operator.
+ def origin(self) -> Input:
+ r"""Allows to connect origin input to the operator.
- An overall 3d vector that gives a point of
- the plane.
+ An overall 3d vector that gives a point of the plane.
- Parameters
- ----------
- my_origin : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -246,18 +256,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.make_plane_levelset()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
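A sketch for levelset::make_plane; building the overall normal and origin vectors through fields_factory, and the z-oriented plane through the origin, are assumptions:

>>> from ansys.dpf import core as dpf
>>> from ansys.dpf.core import fields_factory
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> normal = fields_factory.create_3d_vector_field(1)
>>> normal.append([0.0, 0.0, 1.0], 1)  # plane normal along z
>>> origin = fields_factory.create_3d_vector_field(1)
>>> origin.append([0.0, 0.0, 0.0], 1)  # a point on the plane
>>> op = dpf.operators.mesh.make_plane_levelset(coordinates=model.metadata.meshed_region, normal=normal, origin=origin)
>>> levelset = op.outputs.field()  # level-set value per mesh node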
diff --git a/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py b/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py
index a85da5376db..2dfc57ec0c8 100644
--- a/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py
+++ b/src/ansys/dpf/core/operators/mesh/make_sphere_levelset.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class make_sphere_levelset(Operator):
- """Computes the level set for a sphere using coordinates.
+ r"""Computes the level set for a sphere using coordinates.
+
Parameters
----------
- coordinates : MeshedRegion or Field
- origin : Field
- An overall 3d vector that gives a point of
- the plane.
- radius : float
+ coordinates: MeshedRegion or Field
+ origin: Field
+ An overall 3d vector that gives a point of the plane.
+ radius: float
Sphere radius.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -67,8 +71,9 @@ def __init__(
self.inputs.radius.connect(radius)
@staticmethod
- def _spec():
- description = """Computes the level set for a sphere using coordinates."""
+ def _spec() -> Specification:
+ description = r"""Computes the level set for a sphere using coordinates.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,20 +81,19 @@ def _spec():
name="coordinates",
type_names=["abstract_meshed_region", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="origin",
type_names=["field"],
optional=False,
- document="""An overall 3d vector that gives a point of
- the plane.""",
+ document=r"""An overall 3d vector that gives a point of the plane.""",
),
2: PinSpecification(
name="radius",
type_names=["double"],
optional=False,
- document="""Sphere radius.""",
+ document=r"""Sphere radius.""",
),
},
map_output_pin_spec={
@@ -97,14 +101,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -113,29 +117,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="levelset::make_sphere", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMakeSphereLevelset:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMakeSphereLevelset
+ inputs:
+ An instance of InputsMakeSphereLevelset.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMakeSphereLevelset:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMakeSphereLevelset
+ outputs:
+ An instance of OutputsMakeSphereLevelset.
"""
return super().outputs
@@ -166,12 +177,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._radius)
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Parameters
- ----------
- my_coordinates : MeshedRegion or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,15 +196,15 @@ def coordinates(self):
return self._coordinates
@property
- def origin(self):
- """Allows to connect origin input to the operator.
+ def origin(self) -> Input:
+ r"""Allows to connect origin input to the operator.
- An overall 3d vector that gives a point of
- the plane.
+ An overall 3d vector that gives a point of the plane.
- Parameters
- ----------
- my_origin : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -205,14 +217,15 @@ def origin(self):
return self._origin
@property
- def radius(self):
- """Allows to connect radius input to the operator.
+ def radius(self) -> Input:
+ r"""Allows to connect radius input to the operator.
Sphere radius.
- Parameters
- ----------
- my_radius : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -243,18 +256,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.make_sphere_levelset()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
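The sphere variant follows the same pattern; the center coordinates, the radius, and the fields_factory construction of the overall vector are assumed:

>>> from ansys.dpf import core as dpf
>>> from ansys.dpf.core import fields_factory
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> center = fields_factory.create_3d_vector_field(1)
>>> center.append([0.0, 0.0, 0.0], 1)  # connected to the 'origin' pin
>>> op = dpf.operators.mesh.make_sphere_levelset(coordinates=model.metadata.meshed_region, origin=center, radius=2.5)
>>> levelset = op.outputs.field()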
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_clip.py b/src/ansys/dpf/core/operators/mesh/mesh_clip.py
index fc8481976c9..4251aa09fdc 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_clip.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_clip.py
@@ -4,35 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_clip(Operator):
- """Clips a volume mesh along an iso value x, and construct the volume
- mesh defined by v < x.
+ r"""Clips a volume mesh along an iso value x, and constructs the volume mesh
+ defined by v < x.
+
Parameters
----------
- field : Field
- iso_value : float
- Iso value
- closed_surface : int
+ field: Field
+ iso_value: float
+ iso value
+ closed_surface: int
1: closed surface, 0: iso surface.
- mesh : MeshedRegion, optional
- slice_surfaces : bool
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
+ mesh: MeshedRegion, optional
+ slice_surfaces: bool
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
Returns
-------
- field : Field
- mesh : MeshedRegion
+ field: Field
+ mesh: MeshedRegion
Examples
--------
@@ -92,9 +94,10 @@ def __init__(
self.inputs.slice_surfaces.connect(slice_surfaces)
@staticmethod
- def _spec():
- description = """Clips a volume mesh along an iso value x, and construct the volume
- mesh defined by v < x."""
+ def _spec() -> Specification:
+ description = r"""Clips a volume mesh along an iso value x, and constructs the volume mesh
+defined by v < x.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -102,34 +105,31 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="iso_value",
type_names=["double"],
optional=False,
- document="""Iso value""",
+ document=r"""iso value""",
),
2: PinSpecification(
name="closed_surface",
type_names=["int32"],
optional=False,
- document="""1: closed surface, 0: iso surface.""",
+ document=r"""1: closed surface, 0: iso surface.""",
),
3: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="slice_surfaces",
type_names=["bool"],
optional=False,
- document="""True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.""",
+ document=r"""True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.""",
),
},
map_output_pin_spec={
@@ -137,20 +137,20 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="mesh",
type_names=["meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -159,29 +159,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_clip", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshClip:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshClip
+ inputs:
+ An instance of InputsMeshClip.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshClip:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshClip
+ outputs:
+ An instance of OutputsMeshClip.
"""
return super().outputs
@@ -220,12 +227,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._slice_surfaces)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,14 +246,15 @@ def field(self):
return self._field
@property
- def iso_value(self):
- """Allows to connect iso_value input to the operator.
+ def iso_value(self) -> Input:
+ r"""Allows to connect iso_value input to the operator.
- Iso value
+ iso value
- Parameters
- ----------
- my_iso_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,14 +267,15 @@ def iso_value(self):
return self._iso_value
@property
- def closed_surface(self):
- """Allows to connect closed_surface input to the operator.
+ def closed_surface(self) -> Input:
+ r"""Allows to connect closed_surface input to the operator.
1: closed surface, 0: iso surface.
- Parameters
- ----------
- my_closed_surface : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -278,12 +288,13 @@ def closed_surface(self):
return self._closed_surface
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,17 +307,15 @@ def mesh(self):
return self._mesh
@property
- def slice_surfaces(self):
- """Allows to connect slice_surfaces input to the operator.
+ def slice_surfaces(self) -> Input:
+ r"""Allows to connect slice_surfaces input to the operator.
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
- Parameters
- ----------
- my_slice_surfaces : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,35 +349,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_clip()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_clip()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
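A sketch of mesh_clip keeping the part of the volume mesh where a scalar field stays below an iso value; the displacement-norm field and the 0.001 threshold are assumptions:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> disp = model.results.displacement().outputs.fields_container()
>>> scalar_field = dpf.operators.math.norm(field=disp[0]).outputs.field()
>>> op = dpf.operators.mesh.mesh_clip(field=scalar_field, iso_value=0.001, closed_surface=0, mesh=model.metadata.meshed_region, slice_surfaces=True)
>>> clipped_field = op.outputs.field()
>>> clipped_mesh = op.outputs.mesh()  # volume mesh where the field is below the iso value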
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_cut.py b/src/ansys/dpf/core/operators/mesh/mesh_cut.py
index e5398a170a4..cdf3e0745ae 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_cut.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_cut.py
@@ -4,40 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_cut(Operator):
- """Extracts a skin of the mesh in triangles in a new meshed region.
+ r"""Extracts a skin of the mesh in triangles in a new meshed region.
+
Parameters
----------
- field : Field
- Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.
- iso_value : float
- Iso value
- closed_surface : int
+ field: Field
+ Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.
+ iso_value: float
+ iso value
+ closed_surface: int
1: closed surface, 0: iso surface.
- mesh : MeshedRegion, optional
- Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.
- slice_surfaces : bool
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
+ mesh: MeshedRegion, optional
+ Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.
+ slice_surfaces: bool
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Examples
--------
@@ -96,10 +93,9 @@ def __init__(
self.inputs.slice_surfaces.connect(slice_surfaces)
@staticmethod
- def _spec():
- description = (
- """Extracts a skin of the mesh in triangles in a new meshed region."""
- )
+ def _spec() -> Specification:
+ description = r"""Extracts a skin of the mesh in triangles in a new meshed region.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -107,39 +103,31 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.""",
+ document=r"""Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.""",
),
1: PinSpecification(
name="iso_value",
type_names=["double"],
optional=False,
- document="""Iso value""",
+ document=r"""iso value""",
),
2: PinSpecification(
name="closed_surface",
type_names=["int32"],
optional=False,
- document="""1: closed surface, 0: iso surface.""",
+ document=r"""1: closed surface, 0: iso surface.""",
),
3: PinSpecification(
name="mesh",
type_names=["meshed_region"],
optional=True,
- document="""Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.""",
+ document=r"""Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.""",
),
4: PinSpecification(
name="slice_surfaces",
type_names=["bool"],
optional=False,
- document="""True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.""",
+ document=r"""True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.""",
),
},
map_output_pin_spec={
@@ -147,14 +135,14 @@ def _spec():
name="mesh",
type_names=["meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -163,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_cut", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshCut:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshCut
+ inputs:
+ An instance of InputsMeshCut.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshCut:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshCut
+ outputs:
+ An instance of OutputsMeshCut.
"""
return super().outputs
@@ -224,17 +219,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._slice_surfaces)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field containing the values for the iso-
- surface computation. the mesh can be
- retrieved from this field's support
- or through pin 2.
+ Field containing the values for the iso-surface computation. The mesh can be retrieved from this field's support or through pin 2.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -247,14 +240,15 @@ def field(self):
return self._field
@property
- def iso_value(self):
- """Allows to connect iso_value input to the operator.
+ def iso_value(self) -> Input:
+ r"""Allows to connect iso_value input to the operator.
- Iso value
+ iso value
- Parameters
- ----------
- my_iso_value : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,14 +261,15 @@ def iso_value(self):
return self._iso_value
@property
- def closed_surface(self):
- """Allows to connect closed_surface input to the operator.
+ def closed_surface(self) -> Input:
+ r"""Allows to connect closed_surface input to the operator.
1: closed surface, 0: iso surface.
- Parameters
- ----------
- my_closed_surface : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -287,16 +282,15 @@ def closed_surface(self):
return self._closed_surface
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh to compute the iso-surface from. used
- when not given through the support of
- the field in pin 0.
+ Mesh to compute the iso-surface from. Used when not given through the support of the field in pin 0.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -309,17 +303,15 @@ def mesh(self):
return self._mesh
@property
- def slice_surfaces(self):
- """Allows to connect slice_surfaces input to the operator.
+ def slice_surfaces(self) -> Input:
+ r"""Allows to connect slice_surfaces input to the operator.
- True: slicing will also take into account
- shell and skin elements. false:
- slicing will ignore shell and skin
- elements. the default is true.
+ True: slicing will also take into account shell and skin elements. False: slicing will ignore shell and skin elements. The default is true.
- Parameters
- ----------
- my_slice_surfaces : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,18 +342,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_cut()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
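mesh_cut is wired the same way but returns a surface mesh at the iso value; the scalar field and threshold below are again assumed:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model(r"path/to/model.rst")  # placeholder path
>>> disp = model.results.displacement().outputs.fields_container()
>>> scalar_field = dpf.operators.math.norm(field=disp[0]).outputs.field()
>>> op = dpf.operators.mesh.mesh_cut(field=scalar_field, iso_value=0.001, closed_surface=0, mesh=model.metadata.meshed_region, slice_surfaces=True)
>>> cut_mesh = op.outputs.mesh()  # triangulated surface at the iso value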
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_extraction.py b/src/ansys/dpf/core/operators/mesh/mesh_extraction.py
index 7cb355bfc65..129a4fc87b0 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_extraction.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_extraction.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_extraction(Operator):
- """Take a mesh and a scoping (elemental or nodal) and create a new mesh
+ r"""Takes a mesh and a scoping (elemental or nodal) and creates a new mesh
that contains this selection only.
+
Parameters
----------
- mesh : MeshedRegion
- mesh_scoping : Scoping
- extension : int, optional
+ mesh: MeshedRegion
+ mesh_scoping: Scoping
+ extension: int, optional
Number of extension layers
Returns
-------
- abstract_meshed_region : MeshedRegion
+ abstract_meshed_region: MeshedRegion
Examples
--------
@@ -66,9 +71,10 @@ def __init__(
self.inputs.extension.connect(extension)
@staticmethod
- def _spec():
- description = """Take a mesh and a scoping (elemental or nodal) and create a new mesh
- that contains this selection only."""
+ def _spec() -> Specification:
+ description = r"""Takes a mesh and a scoping (elemental or nodal) and creates a new mesh
+that contains this selection only.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,19 +82,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="extension",
type_names=["int32"],
optional=True,
- document="""Number of extension layer""",
+ document=r"""Number of extension layers""",
),
},
map_output_pin_spec={
@@ -96,14 +102,14 @@ def _spec():
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_extraction", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshExtraction:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshExtraction
+ inputs:
+ An instance of InputsMeshExtraction.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshExtraction:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshExtraction
+ outputs:
+ An instance of OutputsMeshExtraction.
"""
return super().outputs
@@ -165,12 +178,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._extension)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -183,12 +197,13 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -201,14 +216,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def extension(self):
- """Allows to connect extension input to the operator.
+ def extension(self) -> Input:
+ r"""Allows to connect extension input to the operator.
Number of extension layer
- Parameters
- ----------
- my_extension : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -241,18 +257,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._abstract_meshed_region)
@property
- def abstract_meshed_region(self):
- """Allows to get abstract_meshed_region output of the operator
+ def abstract_meshed_region(self) -> Output:
+ r"""Allows to get abstract_meshed_region output of the operator
Returns
- ----------
- my_abstract_meshed_region : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_extraction()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_abstract_meshed_region = op.outputs.abstract_meshed_region()
- """ # noqa: E501
+ """
return self._abstract_meshed_region
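
The mesh_extraction pins documented above map to a short call sequence. A minimal sketch, assuming `my_mesh` (a MeshedRegion) and `my_scoping` (an elemental or nodal Scoping) already exist; only the constructor and pins shown in this file are used:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_extraction()
>>> op.inputs.mesh.connect(my_mesh)             # hypothetical existing MeshedRegion
>>> op.inputs.mesh_scoping.connect(my_scoping)  # hypothetical elemental or nodal Scoping
>>> op.inputs.extension.connect(1)              # optional: grow the selection by one element layer
>>> extracted = op.outputs.abstract_meshed_region()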
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py b/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py
index 98977e9dcf4..21c6978d025 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_get_attribute.py
@@ -4,56 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_get_attribute(Operator):
- """Uses the MeshedRegion APIs to return a given attribute of the mesh in
+ r"""Uses the MeshedRegion APIs to return a given attribute of the mesh in
input.
+
Parameters
----------
- abstract_meshed_region : MeshedRegion
- property_name : str
- Supported property names are: "connectivity",
- "reverse_connectivity", "mat",
- "faces_nodes_connectivity",
- "elements_faces_connectivity" (or any
- mesh's property field),
- "coordinates", "named_selection",
- "num_named_selections",
- "named_selection_names",
- "named_selection_locations",
- "node_scoping", "element_scoping",
- "face_scoping"...
- property_identifier : int or str, optional
- Can be used to get a property at a given
- index, example: a named selection's
- number or by name, example: a named
- selection's name.
+ abstract_meshed_region: MeshedRegion
+ property_name: str
+ Supported property names are: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), "coordinates", "named_selection", "num_named_selections", "named_selection_names", "named_selection_locations", "node_scoping", "element_scoping", "face_scoping"...
+ property_identifier: int or str, optional
+ Can be used to get a property at a given index (example: a named selection's number) or by name (example: a named selection's name).
Returns
-------
- property : Scoping or Field or PropertyField or int or StringField
- Returns a property field for properties:
- "connectivity",
- "reverse_connectivity", "mat",
- "faces_nodes_connectivity",
- "elements_faces_connectivity" (or any
- mesh's property field), a field for
- property: "coordinates", a scoping
- for properties:"named_selection",
- "node_scoping", "element_scoping",
- "face_scoping", a string field for
- properties: "named_selection_names",
- "named_selection_locations" and an
- int for property:
- "num_named_selections".
+ property: Scoping or Field or PropertyField or int or StringField
+ Returns a property field for properties: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), a field for property: "coordinates", a scoping for properties: "named_selection", "node_scoping", "element_scoping", "face_scoping", a string field for properties: "named_selection_names", "named_selection_locations" and an int for property: "num_named_selections".
Examples
--------
@@ -100,9 +79,10 @@ def __init__(
self.inputs.property_identifier.connect(property_identifier)
@staticmethod
- def _spec():
- description = """Uses the MeshedRegion APIs to return a given attribute of the mesh in
- input."""
+ def _spec() -> Specification:
+ description = r"""Uses the MeshedRegion APIs to return a given attribute of the mesh in
+input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,32 +90,19 @@ def _spec():
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="property_name",
type_names=["string"],
optional=False,
- document="""Supported property names are: "connectivity",
- "reverse_connectivity", "mat",
- "faces_nodes_connectivity",
- "elements_faces_connectivity" (or any
- mesh's property field),
- "coordinates", "named_selection",
- "num_named_selections",
- "named_selection_names",
- "named_selection_locations",
- "node_scoping", "element_scoping",
- "face_scoping"...""",
+ document=r"""Supported property names are: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), "coordinates", "named_selection", "num_named_selections", "named_selection_names", "named_selection_locations", "node_scoping", "element_scoping", "face_scoping"...""",
),
2: PinSpecification(
name="property_identifier",
type_names=["int32", "string"],
optional=True,
- document="""Can be used to get a property at a given
- index, example: a named selection's
- number or by name, example: a named
- selection's name.""",
+ document=r"""Can be used to get a property at a given index (example: a named selection's number) or by name (example: a named selection's name).""",
),
},
map_output_pin_spec={
@@ -149,27 +116,14 @@ def _spec():
"string_field",
],
optional=False,
- document="""Returns a property field for properties:
- "connectivity",
- "reverse_connectivity", "mat",
- "faces_nodes_connectivity",
- "elements_faces_connectivity" (or any
- mesh's property field), a field for
- property: "coordinates", a scoping
- for properties:"named_selection",
- "node_scoping", "element_scoping",
- "face_scoping", a string field for
- properties: "named_selection_names",
- "named_selection_locations" and an
- int for property:
- "num_named_selections".""",
+ document=r"""Returns a property field for properties: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), a field for property: "coordinates", a scoping for properties: "named_selection", "node_scoping", "element_scoping", "face_scoping", a string field for properties: "named_selection_names", "named_selection_locations" and an int for property: "num_named_selections".""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -178,29 +132,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh::get_attribute", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshGetAttribute:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshGetAttribute
+ inputs:
+ An instance of InputsMeshGetAttribute.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshGetAttribute:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshGetAttribute
+ outputs:
+ An instance of OutputsMeshGetAttribute.
"""
return super().outputs
@@ -235,12 +196,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_identifier)
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -253,24 +215,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def property_name(self):
- """Allows to connect property_name input to the operator.
-
- Supported property names are: "connectivity",
- "reverse_connectivity", "mat",
- "faces_nodes_connectivity",
- "elements_faces_connectivity" (or any
- mesh's property field),
- "coordinates", "named_selection",
- "num_named_selections",
- "named_selection_names",
- "named_selection_locations",
- "node_scoping", "element_scoping",
- "face_scoping"...
+ def property_name(self) -> Input:
+ r"""Allows to connect property_name input to the operator.
- Parameters
- ----------
- my_property_name : str
+ Supported property names are: "connectivity", "reverse_connectivity", "mat", "faces_nodes_connectivity", "elements_faces_connectivity" (or any mesh's property field), "coordinates", "named_selection", "num_named_selections", "named_selection_names", "named_selection_locations", "node_scoping", "element_scoping", "face_scoping"...
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,17 +236,15 @@ def property_name(self):
return self._property_name
@property
- def property_identifier(self):
- """Allows to connect property_identifier input to the operator.
+ def property_identifier(self) -> Input:
+ r"""Allows to connect property_identifier input to the operator.
- Can be used to get a property at a given
- index, example: a named selection's
- number or by name, example: a named
- selection's name.
+ Can be used to get a property at a given index, example: a named selection's number or by name, example: a named selection's name.
- Parameters
- ----------
- my_property_identifier : int or str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
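
Before the next file, a minimal sketch of how the mesh_get_attribute pins above are wired, assuming `my_mesh` is an existing MeshedRegion; the returned type depends on the requested property name, as listed in the Returns section of this file:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_get_attribute()
>>> op.inputs.abstract_meshed_region.connect(my_mesh)         # hypothetical existing MeshedRegion
>>> op.inputs.property_name.connect("named_selection_names")
>>> # eval() evaluates the operator and returns the pin 0 output; for this property name
>>> # it is a string field (see the Returns section above for other property names).
>>> names = op.eval()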
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py b/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py
index 25191828060..6726af2da75 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_plan_clip.py
@@ -4,30 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_plan_clip(Operator):
- """Clips a volume mesh along a plane and keeps one side.
+ r"""Clips a volume mesh along a plane and keeps one side.
+
Parameters
----------
- mesh_or_field : MeshedRegion or Field
- normal : Field
- An overall 3d vector that gives the normal
- direction of the plane.
- origin : Field
- An overall 3d vector that gives a point of
- the plane.
+ mesh_or_field: MeshedRegion or Field
+ normal: Field
+ An overall 3D vector that gives the normal direction of the plane.
+ origin: Field
+ An overall 3D vector that gives a point of the plane.
Returns
-------
- field : Field
- mesh : MeshedRegion
+ field: Field
+ mesh: MeshedRegion
Examples
--------
@@ -70,8 +73,9 @@ def __init__(
self.inputs.origin.connect(origin)
@staticmethod
- def _spec():
- description = """Clips a volume mesh along a plane and keeps one side."""
+ def _spec() -> Specification:
+ description = r"""Clips a volume mesh along a plane and keeps one side.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -79,21 +83,19 @@ def _spec():
name="mesh_or_field",
type_names=["abstract_meshed_region", "field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="normal",
type_names=["field"],
optional=False,
- document="""An overall 3d vector that gives the normal
- direction of the plane.""",
+ document=r"""An overall 3D vector that gives the normal direction of the plane.""",
),
2: PinSpecification(
name="origin",
type_names=["field"],
optional=False,
- document="""An overall 3d vector that gives a point of
- the plane.""",
+ document=r"""An overall 3D vector that gives a point of the plane.""",
),
},
map_output_pin_spec={
@@ -101,20 +103,20 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -123,29 +125,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_plan_clip", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshPlanClip:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshPlanClip
+ inputs:
+ An instance of InputsMeshPlanClip.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshPlanClip:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshPlanClip
+ outputs:
+ An instance of OutputsMeshPlanClip.
"""
return super().outputs
@@ -176,12 +185,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._origin)
@property
- def mesh_or_field(self):
- """Allows to connect mesh_or_field input to the operator.
+ def mesh_or_field(self) -> Input:
+ r"""Allows to connect mesh_or_field input to the operator.
- Parameters
- ----------
- my_mesh_or_field : MeshedRegion or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -194,15 +204,15 @@ def mesh_or_field(self):
return self._mesh_or_field
@property
- def normal(self):
- """Allows to connect normal input to the operator.
+ def normal(self) -> Input:
+ r"""Allows to connect normal input to the operator.
- An overall 3d vector that gives the normal
- direction of the plane.
+ An overall 3D vector that gives the normal direction of the plane.
- Parameters
- ----------
- my_normal : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,15 +225,15 @@ def normal(self):
return self._normal
@property
- def origin(self):
- """Allows to connect origin input to the operator.
+ def origin(self) -> Input:
+ r"""Allows to connect origin input to the operator.
- An overall 3d vector that gives a point of
- the plane.
+ An overall 3D vector that gives a point of the plane.
- Parameters
- ----------
- my_origin : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -257,35 +267,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_plan_clip()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_plan_clip()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
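
A minimal sketch of the mesh_plan_clip wiring documented above, assuming `my_mesh` is an existing volume MeshedRegion and `normal_field`/`origin_field` are pre-built overall 3D vector Fields (their construction is not shown here):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_plan_clip()
>>> op.inputs.mesh_or_field.connect(my_mesh)     # hypothetical volume MeshedRegion
>>> op.inputs.normal.connect(normal_field)       # hypothetical Field: overall 3D vector, plane normal
>>> op.inputs.origin.connect(origin_field)       # hypothetical Field: overall 3D vector, a point on the plane
>>> clipped_mesh = op.outputs.mesh()             # output pin 2: the clipped MeshedRegion
>>> clipped_field = op.outputs.field()           # output pin 0: the associated Field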
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_provider.py b/src/ansys/dpf/core/operators/mesh/mesh_provider.py
index 2d37c365a8c..361e2dc9651 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_provider.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_provider.py
@@ -4,61 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_provider(Operator):
- """Reads a mesh from result files.
+ r"""Reads a mesh from result files.
+
Parameters
----------
- time_scoping : int, optional
- Optional time/frequency set id of the mesh,
- supported for adaptative meshes.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- read_cyclic : int, optional
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).
- laziness : DataTree, optional
- Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties
+ time_scoping: int, optional
+ Optional time/frequency set ID of the mesh, supported for adaptive meshes.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ read_cyclic: int, optional
+ If 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids with one entity (vector) or region scoping with one id (scoping) (region corresponds to zone for Fluid results or part for LSDyna results).
+ laziness: DataTree, optional
+ Configure whether lazy evaluation can be performed and to what extent. Supported attributes are:
+ - "num_named_selections"->number of named selections to read (-1 is all, int32, default is -1), careful: the other named selections will not be available, use the mesh_property_provider operator to read them.
+ - all mesh property fields "mat", "named_selection", "apdl_element_type", "section"-> if set to 1 these properties will not be read and a workflow will be bound to the properties to be evaluated on demand, with 0 they are read (default is 0).
+ - "all_available_properties" option set to 0 will return all possible properties
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Examples
--------
@@ -123,8 +104,9 @@ def __init__(
self.inputs.laziness.connect(laziness)
@staticmethod
- def _spec():
- description = """Reads a mesh from result files."""
+ def _spec() -> Specification:
+ description = r"""Reads a mesh from result files.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -132,64 +114,40 @@ def _spec():
name="time_scoping",
type_names=["int32"],
optional=True,
- document="""Optional time/frequency set id of the mesh,
- supported for adaptative meshes.""",
+ document=r"""Optional time/frequency set ID of the mesh, supported for adaptive meshes.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).""",
+ document=r"""If 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids with one entity (vector) or region scoping with one id (scoping) (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
200: PinSpecification(
name="laziness",
type_names=["abstract_data_tree"],
optional=True,
- document="""Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties""",
+ document=r"""Configure whether lazy evaluation can be performed and to what extent. Supported attributes are:
+- "num_named_selections"->number of named selections to read (-1 is all, int32, default is -1), careful: the other named selections will not be available, use the mesh_property_provider operator to read them.
+- all mesh property fields "mat", "named_selection", "apdl_element_type", "section"-> if set to 1 these properties will not be read and a workflow will be bound to the properties to be evaluated on demand, with 0 they are read (default is 0).
+- "all_available_properties" option set to 0 will return all possible properties""",
),
},
map_output_pin_spec={
@@ -197,14 +155,14 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -213,29 +171,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshProvider
+ inputs:
+ An instance of InputsMeshProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshProvider
+ outputs:
+ An instance of OutputsMeshProvider.
"""
return super().outputs
@@ -278,15 +243,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._laziness)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Optional time/frequency set id of the mesh,
- supported for adaptative meshes.
+ Optional time/frequency set ID of the mesh, supported for adaptive meshes.
- Parameters
- ----------
- my_time_scoping : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,15 +264,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -320,15 +285,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -341,16 +306,15 @@ def data_sources(self):
return self._data_sources
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
+ If 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -363,18 +327,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- with one entity (vector) or region
- scoping with one id (scoping) (region
- corresponds to zone for fluid results
- or part for lsdyna results).
+ region id (integer) or vector of region ids with one entity (vector) or region scoping with one id (scoping) (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -387,32 +348,18 @@ def region_scoping(self):
return self._region_scoping
@property
- def laziness(self):
- """Allows to connect laziness input to the operator.
-
- Configurate whether lazy evaluation can be
- performed and to what extent.
- supported attributes are: -
- "num_named_selections"->num named
- selection to read (-1 is all, int32,
- default si -1), careful: the other
- named selections will not be
- available, use mesh_property_provider
- operator to read them. - all mesh
- property fields "mat",
- "named_selection",
- "apdl_element_type", "section"-> if
- set to 1 these properties will not be
- read and a workflow will be bounded
- to the properties to be evaluated on
- demand, with 0 they are read (default
- is 0). - "all_available_properties"
- option set to 0 will return all
- possible properties
+ def laziness(self) -> Input:
+ r"""Allows to connect laziness input to the operator.
- Parameters
- ----------
- my_laziness : DataTree
+ Configure whether lazy evaluation can be performed and to what extent. Supported attributes are:
+ - "num_named_selections"->number of named selections to read (-1 is all, int32, default is -1), careful: the other named selections will not be available, use the mesh_property_provider operator to read them.
+ - all mesh property fields "mat", "named_selection", "apdl_element_type", "section"-> if set to 1 these properties will not be read and a workflow will be bound to the properties to be evaluated on demand, with 0 they are read (default is 0).
+ - "all_available_properties" option set to 0 will return all possible properties
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -443,18 +390,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
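
A minimal sketch of the mesh_provider usage implied by the pins above; the result file path is hypothetical, and only the data_sources and read_cyclic pins shown in this file are connected:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("path/to/results.rst")  # hypothetical result file path
>>> op = dpf.operators.mesh.mesh_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.read_cyclic.connect(2)   # optional: 2 performs cyclic expansion, 1 (default) ignores cyclic symmetry
>>> mesh = op.outputs.mesh()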
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py
index 7b402e7f77c..d8ae5c1949e 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_to_graphics(Operator):
- """Generate tessellation for input mesh
+ r"""Generate tessellation for input mesh
+
Parameters
----------
- mesh_scoping : Scoping, optional
- node_normals : bool, optional
- Average element normals for node normals
- (default no, use element normals for
- node normals)
- mesh : MeshedRegion
+ mesh_scoping: Scoping, optional
+ node_normals: bool, optional
+ average element normals for node normals (default no, use element normals for node normals)
+ mesh: MeshedRegion
Returns
-------
- nodes : Field
- Node coordinates
- normals : Field
- Node normals
- connectivity : PropertyField
+ nodes: Field
+ node coordinates
+ normals: Field
+ node normals
+ connectivity: PropertyField
Examples
--------
@@ -73,8 +76,9 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Generate tessellation for input mesh"""
+ def _spec() -> Specification:
+ description = r"""Generate tessellation for input mesh
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -82,21 +86,19 @@ def _spec():
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="node_normals",
type_names=["bool"],
optional=True,
- document="""Average element normals for node normals
- (default no, use element normals for
- node normals)""",
+ document=r"""average element normals for node normals (default no, use element normals for node normals)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -104,26 +106,26 @@ def _spec():
name="nodes",
type_names=["field"],
optional=False,
- document="""Node coordinates""",
+ document=r"""node coordinates""",
),
1: PinSpecification(
name="normals",
type_names=["field"],
optional=False,
- document="""Node normals""",
+ document=r"""node normals""",
),
2: PinSpecification(
name="connectivity",
type_names=["property_field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -132,29 +134,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_to_graphics", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshToGraphics:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshToGraphics
+ inputs:
+ An instance of InputsMeshToGraphics.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshToGraphics:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshToGraphics
+ outputs:
+ An instance of OutputsMeshToGraphics.
"""
return super().outputs
@@ -185,12 +194,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,16 +213,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def node_normals(self):
- """Allows to connect node_normals input to the operator.
+ def node_normals(self) -> Input:
+ r"""Allows to connect node_normals input to the operator.
- Average element normals for node normals
- (default no, use element normals for
- node normals)
+ average element normals for node normals (default no, use element normals for node normals)
- Parameters
- ----------
- my_node_normals : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,12 +234,13 @@ def node_normals(self):
return self._node_normals
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,52 +277,59 @@ def __init__(self, op: Operator):
self._outputs.append(self._connectivity)
@property
- def nodes(self):
- """Allows to get nodes output of the operator
+ def nodes(self) -> Output:
+ r"""Allows to get nodes output of the operator
+
+ node coordinates
Returns
- ----------
- my_nodes : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes = op.outputs.nodes()
- """ # noqa: E501
+ """
return self._nodes
@property
- def normals(self):
- """Allows to get normals output of the operator
+ def normals(self) -> Output:
+ r"""Allows to get normals output of the operator
+
+ node normals
Returns
- ----------
- my_normals : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_normals = op.outputs.normals()
- """ # noqa: E501
+ """
return self._normals
@property
- def connectivity(self):
- """Allows to get connectivity output of the operator
+ def connectivity(self) -> Output:
+ r"""Allows to get connectivity output of the operator
Returns
- ----------
- my_connectivity : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_connectivity = op.outputs.connectivity()
- """ # noqa: E501
+ """
return self._connectivity
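
A minimal sketch of the mesh_to_graphics wiring documented above, assuming `my_mesh` is an existing MeshedRegion:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics()
>>> op.inputs.mesh.connect(my_mesh)           # hypothetical existing MeshedRegion
>>> op.inputs.node_normals.connect(True)      # optional: average element normals at the nodes
>>> nodes = op.outputs.nodes()                # node coordinates Field
>>> normals = op.outputs.normals()            # node normals Field
>>> connectivity = op.outputs.connectivity()  # tessellation connectivity PropertyField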
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py
index bcfafbb6728..4e5b5d2c05e 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_to_graphics_edges.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_to_graphics_edges(Operator):
- """Generate edges of surface elements for input mesh
+ r"""Generate edges of surface elements for input mesh
+
Parameters
----------
- mesh_scoping : Scoping, optional
- include_mid_nodes : bool, optional
- mesh : MeshedRegion
+ mesh_scoping: Scoping, optional
+ include_mid_nodes: bool, optional
+ mesh: MeshedRegion
Returns
-------
- nodes : Field
- Node coordinates
- connectivity : PropertyField
+ nodes: Field
+ node coordinates
+ connectivity: PropertyField
Examples
--------
@@ -72,8 +77,9 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Generate edges of surface elements for input mesh"""
+ def _spec() -> Specification:
+ description = r"""Generate edges of surface elements for input mesh
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -81,19 +87,19 @@ def _spec():
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
6: PinSpecification(
name="include_mid_nodes",
type_names=["bool"],
optional=True,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -101,20 +107,20 @@ def _spec():
name="nodes",
type_names=["field"],
optional=False,
- document="""Node coordinates""",
+ document=r"""node coordinates""",
),
2: PinSpecification(
name="connectivity",
type_names=["property_field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -123,29 +129,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_to_graphics_edges", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshToGraphicsEdges:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshToGraphicsEdges
+ inputs:
+ An instance of InputsMeshToGraphicsEdges.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshToGraphicsEdges:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshToGraphicsEdges
+ outputs:
+ An instance of OutputsMeshToGraphicsEdges.
"""
return super().outputs
@@ -180,12 +193,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,12 +212,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def include_mid_nodes(self):
- """Allows to connect include_mid_nodes input to the operator.
+ def include_mid_nodes(self) -> Input:
+ r"""Allows to connect include_mid_nodes input to the operator.
- Parameters
- ----------
- my_include_mid_nodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -216,12 +231,13 @@ def include_mid_nodes(self):
return self._include_mid_nodes
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -255,35 +271,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._connectivity)
@property
- def nodes(self):
- """Allows to get nodes output of the operator
+ def nodes(self) -> Output:
+ r"""Allows to get nodes output of the operator
+
+ node coordinates
Returns
- ----------
- my_nodes : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics_edges()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes = op.outputs.nodes()
- """ # noqa: E501
+ """
return self._nodes
@property
- def connectivity(self):
- """Allows to get connectivity output of the operator
+ def connectivity(self) -> Output:
+ r"""Allows to get connectivity output of the operator
Returns
- ----------
- my_connectivity : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics_edges()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_connectivity = op.outputs.connectivity()
- """ # noqa: E501
+ """
return self._connectivity
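
Analogously, a minimal sketch for mesh_to_graphics_edges, assuming `my_mesh` is an existing MeshedRegion:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_graphics_edges()
>>> op.inputs.mesh.connect(my_mesh)                # hypothetical existing MeshedRegion
>>> op.inputs.include_mid_nodes.connect(False)     # optional: skip midside nodes of quadratic elements
>>> edge_nodes = op.outputs.nodes()                # node coordinates Field
>>> edge_connectivity = op.outputs.connectivity()  # edge connectivity PropertyField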
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py b/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py
index 78e13a0a33a..f7fd30836b9 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_to_pyvista.py
@@ -4,44 +4,43 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_to_pyvista(Operator):
- """Export a MeshedRegion in the pyVista format.
+ r"""Export a MeshedRegion in the pyVista format.
+
Parameters
----------
- coordinates : Field, optional
- Node coordinates. if not set, the node
- coordinates of the mesh are employed.
- as_linear : bool, optional
- Export a linear version of the mesh
- (quadratic surface elements do no
- include midside nodes). if not set,
- defaults to true.
- mesh : MeshedRegion
- Mesh to export in pyvista format
- vtk_updated : bool, optional
- True if the vtk version employed by pyvista
- is > vtk 9. default true.
- as_poly : bool, optional
- Export elements as polyhedrons (cell-face-
- node representation). default false.
+ coordinates: Field, optional
+ Node coordinates. If not set, the node coordinates of the mesh are employed.
+ as_linear: bool, optional
+ Export a linear version of the mesh (quadratic surface elements do not include midside nodes). If not set, defaults to true.
+ mesh: MeshedRegion
+ mesh to export in pyVista format
+ vtk_updated: bool, optional
+ True if the VTK version employed by pyVista is > VTK 9. Default true.
+ as_poly: bool, optional
+ Export elements as polyhedrons (cell-face-node representation). Default false.
Returns
-------
- nodes : Field
+ nodes: Field
Node coordinates double vector
- cells :
+ cells:
Cell connectivity int vector
- cell_types :
+ cell_types:
Cell types property int vector
- offsets : optional
+ offsets: optional
If vtk_updated=false, offsets int vector
Examples
@@ -104,8 +103,9 @@ def __init__(
self.inputs.as_poly.connect(as_poly)
@staticmethod
- def _spec():
- description = """Export a MeshedRegion in the pyVista format."""
+ def _spec() -> Specification:
+ description = r"""Export a MeshedRegion in the pyVista format.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -113,37 +113,31 @@ def _spec():
name="coordinates",
type_names=["field"],
optional=True,
- document="""Node coordinates. if not set, the node
- coordinates of the mesh are employed.""",
+ document=r"""Node coordinates. If not set, the node coordinates of the mesh are employed.""",
),
6: PinSpecification(
name="as_linear",
type_names=["bool"],
optional=True,
- document="""Export a linear version of the mesh
- (quadratic surface elements do no
- include midside nodes). if not set,
- defaults to true.""",
+ document=r"""Export a linear version of the mesh (quadratic surface elements do not include midside nodes). If not set, defaults to true.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Mesh to export in pyvista format""",
+ document=r"""mesh to export in pyVista format""",
),
60: PinSpecification(
name="vtk_updated",
type_names=["bool"],
optional=True,
- document="""True if the vtk version employed by pyvista
- is > vtk 9. default true.""",
+ document=r"""True if the VTK version employed by pyVista is > VTK 9. Default true.""",
),
200: PinSpecification(
name="as_poly",
type_names=["bool"],
optional=True,
- document="""Export elements as polyhedrons (cell-face-
- node representation). default false.""",
+ document=r"""Export elements as polyhedrons (cell-face-node representation). Default false.""",
),
},
map_output_pin_spec={
@@ -151,32 +145,32 @@ def _spec():
name="nodes",
type_names=["field"],
optional=False,
- document="""Node coordinates double vector""",
+ document=r"""Node coordinates double vector""",
),
1: PinSpecification(
name="cells",
type_names=["vector"],
optional=False,
- document="""Cell connectivity int vector""",
+ document=r"""Cell connectivity int vector""",
),
2: PinSpecification(
name="cell_types",
type_names=["vector"],
optional=False,
- document="""Cell types property int vector""",
+ document=r"""Cell types property int vector""",
),
3: PinSpecification(
name="offsets",
type_names=["vector"],
optional=True,
- document="""If vtk_updated=false, offsets int vector""",
+ document=r"""If vtk_updated=false, offsets int vector""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -185,29 +179,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_to_pyvista", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshToPyvista:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshToPyvista
+ inputs:
+ An instance of InputsMeshToPyvista.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshToPyvista:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshToPyvista
+ outputs:
+ An instance of OutputsMeshToPyvista.
"""
return super().outputs
@@ -246,15 +247,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._as_poly)
@property
- def coordinates(self):
- """Allows to connect coordinates input to the operator.
+ def coordinates(self) -> Input:
+ r"""Allows to connect coordinates input to the operator.
- Node coordinates. if not set, the node
- coordinates of the mesh are employed.
+ Node coordinates. If not set, the node coordinates of the mesh are employed.
- Parameters
- ----------
- my_coordinates : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -267,17 +268,15 @@ def coordinates(self):
return self._coordinates
@property
- def as_linear(self):
- """Allows to connect as_linear input to the operator.
+ def as_linear(self) -> Input:
+ r"""Allows to connect as_linear input to the operator.
- Export a linear version of the mesh
- (quadratic surface elements do no
- include midside nodes). if not set,
- defaults to true.
+ Export a linear version of the mesh (quadratic surface elements do not include midside nodes). If not set, defaults to true.
- Parameters
- ----------
- my_as_linear : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -290,14 +289,15 @@ def as_linear(self):
return self._as_linear
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh to export in pyvista format
+ mesh to export in pyVista format
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -310,15 +310,15 @@ def mesh(self):
return self._mesh
@property
- def vtk_updated(self):
- """Allows to connect vtk_updated input to the operator.
+ def vtk_updated(self) -> Input:
+ r"""Allows to connect vtk_updated input to the operator.
- True if the vtk version employed by pyvista
- is > vtk 9. default true.
+ True if the VTK version employed by pyVista is > VTK 9. Default true.
- Parameters
- ----------
- my_vtk_updated : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -331,15 +331,15 @@ def vtk_updated(self):
return self._vtk_updated
@property
- def as_poly(self):
- """Allows to connect as_poly input to the operator.
+ def as_poly(self) -> Input:
+ r"""Allows to connect as_poly input to the operator.
- Export elements as polyhedrons (cell-face-
- node representation). default false.
+ Export elements as polyhedrons (cell-face-node representation). Default false.
- Parameters
- ----------
- my_as_poly : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,69 +379,81 @@ def __init__(self, op: Operator):
self._outputs.append(self._offsets)
@property
- def nodes(self):
- """Allows to get nodes output of the operator
+ def nodes(self) -> Output:
+ r"""Allows to get nodes output of the operator
+
+ Node coordinates double vector
Returns
- ----------
- my_nodes : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_pyvista()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes = op.outputs.nodes()
- """ # noqa: E501
+ """
return self._nodes
@property
- def cells(self):
- """Allows to get cells output of the operator
+ def cells(self) -> Output:
+ r"""Allows to get cells output of the operator
+
+ Cell connectivity int vector
Returns
- ----------
- my_cells :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_pyvista()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_cells = op.outputs.cells()
- """ # noqa: E501
+ """
return self._cells
@property
- def cell_types(self):
- """Allows to get cell_types output of the operator
+ def cell_types(self) -> Output:
+ r"""Allows to get cell_types output of the operator
+
+ Cell types property int vector
Returns
- ----------
- my_cell_types :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_pyvista()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_cell_types = op.outputs.cell_types()
- """ # noqa: E501
+ """
return self._cell_types
@property
- def offsets(self):
- """Allows to get offsets output of the operator
+ def offsets(self) -> Output:
+ r"""Allows to get offsets output of the operator
+
+ If vtk_updated=false, offsets int vector
Returns
- ----------
- my_offsets :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_pyvista()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_offsets = op.outputs.offsets()
- """ # noqa: E501
+ """
return self._offsets
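
A minimal sketch of the mesh_to_pyvista wiring documented above, assuming `my_mesh` is an existing MeshedRegion; the outputs are the raw arrays expected by a pyVista unstructured grid, and their assembly into a grid is not shown:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_pyvista()
>>> op.inputs.mesh.connect(my_mesh)        # hypothetical existing MeshedRegion
>>> op.inputs.as_linear.connect(True)      # optional: drop midside nodes for rendering
>>> nodes = op.outputs.nodes()             # node coordinates double vector (Field)
>>> cells = op.outputs.cells()             # cell connectivity int vector
>>> cell_types = op.outputs.cell_types()   # cell types int vector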
diff --git a/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py b/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py
index 18a111ba367..2d7c15c9650 100644
--- a/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py
+++ b/src/ansys/dpf/core/operators/mesh/mesh_to_tetra.py
@@ -4,32 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_to_tetra(Operator):
- """Converts 3D meshes of arbitrary 3D element types into a tetrahedral
- mesh, output at pin (0). Non 3D elements are ignored. Scopings
- providing the mapping from resulting nodes & elements to their
- original ID in the input mesh are provided, output pins (1) & (2)
- respectively.
+ r"""Converts 3D meshes of arbitrary 3D element types into a tetrahedral
+ mesh, output at pin (0). Non-3D elements are ignored. Scopings providing
+ the mapping from resulting nodes & elements to their original ID in the
+ input mesh are provided, output pins (1) & (2) respectively.
+
Parameters
----------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Mesh with arbitrary element types.
Returns
-------
- mesh : MeshedRegion
+ mesh: MeshedRegion
Tetrahedralized mesh.
- node_mapping : Scoping
+ node_mapping: Scoping
Node mapping.
- element_mapping : Scoping
+ element_mapping: Scoping
Element mapping.
Examples
@@ -62,12 +66,12 @@ def __init__(self, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Converts 3D meshes of arbitrary 3D element types into a tetrahedral
- mesh, output at pin (0). Non 3D elements are ignored.
- Scopings providing the mapping from resulting nodes &
- elements to their original ID in the input mesh are
- provided, output pins (1) & (2) respectively."""
+ def _spec() -> Specification:
+ description = r"""Converts 3D meshes of arbitrary 3D element types into a tetrahedral
+mesh, output at pin (0). Non 3D elements are ignored. Scopings providing
+the mapping from resulting nodes & elements to their original ID in the
+input mesh are provided, output pins (1) & (2) respectively.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -75,7 +79,7 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Mesh with arbitrary element types.""",
+ document=r"""Mesh with arbitrary element types.""",
),
},
map_output_pin_spec={
@@ -83,26 +87,26 @@ def _spec():
name="mesh",
type_names=["meshed_region"],
optional=False,
- document="""Tetrahedralized mesh.""",
+ document=r"""Tetrahedralized mesh.""",
),
1: PinSpecification(
name="node_mapping",
type_names=["scoping"],
optional=False,
- document="""Node mapping.""",
+ document=r"""Node mapping.""",
),
2: PinSpecification(
name="element_mapping",
type_names=["scoping"],
optional=False,
- document="""Element mapping.""",
+ document=r"""Element mapping.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -111,29 +115,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_to_tetra", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshToTetra:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshToTetra
+ inputs:
+ An instance of InputsMeshToTetra.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshToTetra:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshToTetra
+ outputs:
+ An instance of OutputsMeshToTetra.
"""
return super().outputs
@@ -156,14 +167,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
Mesh with arbitrary element types.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -200,52 +212,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._element_mapping)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
+
+ Tetrahedralized mesh.
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_tetra()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
@property
- def node_mapping(self):
- """Allows to get node_mapping output of the operator
+ def node_mapping(self) -> Output:
+ r"""Allows to get node_mapping output of the operator
+
+ Node mapping.
Returns
- ----------
- my_node_mapping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_tetra()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_node_mapping = op.outputs.node_mapping()
- """ # noqa: E501
+ """
return self._node_mapping
@property
- def element_mapping(self):
- """Allows to get element_mapping output of the operator
+ def element_mapping(self) -> Output:
+ r"""Allows to get element_mapping output of the operator
+
+ Element mapping.
Returns
- ----------
- my_element_mapping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.mesh_to_tetra()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_element_mapping = op.outputs.element_mapping()
- """ # noqa: E501
+ """
return self._element_mapping
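A short sketch of the tetrahedralization flow documented above, using the constructor keyword and the three retyped output pins; the result file path is a placeholder:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

op = dpf.operators.mesh.mesh_to_tetra(mesh=model.metadata.meshed_region)

tet_mesh = op.outputs.mesh()             # tetrahedralized MeshedRegion (pin 0)
node_map = op.outputs.node_mapping()     # Scoping: new nodes to original node IDs (pin 1)
elem_map = op.outputs.element_mapping()  # Scoping: new elements to original element IDs (pin 2)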
diff --git a/src/ansys/dpf/core/operators/mesh/meshes_provider.py b/src/ansys/dpf/core/operators/mesh/meshes_provider.py
index 33f1ba06608..41543eff132 100644
--- a/src/ansys/dpf/core/operators/mesh/meshes_provider.py
+++ b/src/ansys/dpf/core/operators/mesh/meshes_provider.py
@@ -4,41 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class meshes_provider(Operator):
- """Reads meshes from result files. Meshes can be spatially or temporally
+ r"""Reads meshes from result files. Meshes can be spatially or temporally
varying.
+
Parameters
----------
- time_scoping : Scoping or int, optional
- Time/frequency set ids required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- read_cyclic : int, optional
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ time_scoping: Scoping or int, optional
+ Time/frequency set IDs required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ read_cyclic: int, optional
+ if 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
Returns
-------
- meshes : MeshesContainer
+ meshes: MeshesContainer
Examples
--------
@@ -97,9 +94,10 @@ def __init__(
self.inputs.region_scoping.connect(region_scoping)
@staticmethod
- def _spec():
- description = """Reads meshes from result files. Meshes can be spatially or temporally
- varying."""
+ def _spec() -> Specification:
+ description = r"""Reads meshes from result files. Meshes can be spatially or temporally
+varying.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -107,39 +105,31 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector", "int32"],
optional=True,
- document="""Time/frequency set ids required in output.""",
+ document=r"""Time/frequency set IDs required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).""",
+ document=r"""if 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
},
map_output_pin_spec={
@@ -147,14 +137,14 @@ def _spec():
name="meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -163,29 +153,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="meshes_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshesProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshesProvider
+ inputs:
+ An instance of InputsMeshesProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshesProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshesProvider
+ outputs:
+ An instance of OutputsMeshesProvider.
"""
return super().outputs
@@ -224,14 +221,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._region_scoping)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Time/frequency set ids required in output.
+ Time/frequency set IDs required in output.
- Parameters
- ----------
- my_time_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -244,15 +242,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -265,15 +263,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -286,16 +284,15 @@ def data_sources(self):
return self._data_sources
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 1, cyclic symmetry is ignored. if 2,
- cyclic expansion is done (default is
- 1).
+ if 1, cyclic symmetry is ignored. If 2, cyclic expansion is done (default is 1).
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -308,18 +305,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,18 +344,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._meshes)
@property
- def meshes(self):
- """Allows to get meshes output of the operator
+ def meshes(self) -> Output:
+ r"""Allows to get meshes output of the operator
Returns
- ----------
- my_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.meshes_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshes = op.outputs.meshes()
- """ # noqa: E501
+ """
return self._meshes
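For meshes_provider, a minimal sketch with the mandatory data_sources pin plus two of the optional pins; the file path, set ID, and region ID are illustrative placeholders:

from ansys.dpf import core as dpf

ds = dpf.DataSources(r"path/to/fluid_results.cas.h5")  # placeholder result file

op = dpf.operators.mesh.meshes_provider()
op.inputs.data_sources.connect(ds)
op.inputs.time_scoping.connect(1)    # time/frequency set ID required in output
op.inputs.region_scoping.connect(2)  # region (zone/part) ID, here a single integer

meshes = op.outputs.meshes()         # MeshesContainer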
diff --git a/src/ansys/dpf/core/operators/mesh/node_coordinates.py b/src/ansys/dpf/core/operators/mesh/node_coordinates.py
index 3707904442e..7c6b01e3ea6 100644
--- a/src/ansys/dpf/core/operators/mesh/node_coordinates.py
+++ b/src/ansys/dpf/core/operators/mesh/node_coordinates.py
@@ -4,29 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class node_coordinates(Operator):
- """Returns the node coordinates of the mesh(es) in input.
+ r"""Returns the node coordinates of the mesh(es) in input.
+
Parameters
----------
- mesh : MeshedRegion or MeshesContainer
+ mesh: MeshedRegion or MeshesContainer
Returns
-------
- coordinates : Field or FieldsContainer
- If the input is a meshed region, a field of
- coordinates is the output, else if
- the input is a meshes container, a
- fields container (one field by mesh)
- is the output
+ coordinates: Field or FieldsContainer
+ if the input is a meshed region, a field of coordinates is the output, else if the input is a meshes container, a fields container (one field by mesh) is the output
Examples
--------
@@ -56,8 +57,9 @@ def __init__(self, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Returns the node coordinates of the mesh(es) in input."""
+ def _spec() -> Specification:
+ description = r"""Returns the node coordinates of the mesh(es) in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,7 +67,7 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -73,18 +75,14 @@ def _spec():
name="coordinates",
type_names=["field", "fields_container"],
optional=False,
- document="""If the input is a meshed region, a field of
- coordinates is the output, else if
- the input is a meshes container, a
- fields container (one field by mesh)
- is the output""",
+ document=r"""if the input is a meshed region, a field of coordinates is the output, else if the input is a meshes container, a fields container (one field by mesh) is the output""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -93,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh::node_coordinates", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsNodeCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsNodeCoordinates
+ inputs:
+ An instance of InputsNodeCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsNodeCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsNodeCoordinates
+ outputs:
+ An instance of OutputsNodeCoordinates.
"""
return super().outputs
@@ -138,12 +143,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
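node_coordinates accepts either a MeshedRegion or a MeshesContainer; with a single mesh the output is one Field of coordinates. Because the coordinates pin supports two types (hence _modify_output_spec_with_one_type in the imports), this sketch simply calls op.eval(); the file path is a placeholder:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

op = dpf.operators.mesh.node_coordinates()
op.inputs.mesh.connect(model.metadata.meshed_region)

# Field of nodal coordinates for a MeshedRegion input; a MeshesContainer
# input would instead yield a FieldsContainer with one field per mesh.
coordinates = op.eval()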
diff --git a/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py b/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py
index 83d74d552cd..35c42745247 100644
--- a/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py
+++ b/src/ansys/dpf/core/operators/mesh/points_from_coordinates.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class points_from_coordinates(Operator):
- """Extract a mesh made of points elements. This mesh is made from input
+    r"""Extract a mesh made of point elements. This mesh is made from input
meshes coordinates on the input scopings.
+
Parameters
----------
- nodes_to_keep : Scoping or ScopingsContainer
- mesh : MeshedRegion or MeshesContainer
+ nodes_to_keep: Scoping or ScopingsContainer
+ mesh: MeshedRegion or MeshesContainer
Returns
-------
- abstract_meshed_region : MeshedRegion
+ abstract_meshed_region: MeshedRegion
Examples
--------
@@ -59,9 +64,10 @@ def __init__(self, nodes_to_keep=None, mesh=None, config=None, server=None):
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Extract a mesh made of points elements. This mesh is made from input
- meshes coordinates on the input scopings."""
+ def _spec() -> Specification:
+    description = r"""Extract a mesh made of point elements. This mesh is made from input
+meshes coordinates on the input scopings.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,13 +75,13 @@ def _spec():
name="nodes_to_keep",
type_names=["scoping", "scopings_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -83,14 +89,14 @@ def _spec():
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -99,31 +105,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="mesh::points_from_coordinates", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsPointsFromCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPointsFromCoordinates
+ inputs:
+ An instance of InputsPointsFromCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPointsFromCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPointsFromCoordinates
+ outputs:
+ An instance of OutputsPointsFromCoordinates.
"""
return super().outputs
@@ -152,12 +165,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def nodes_to_keep(self):
- """Allows to connect nodes_to_keep input to the operator.
+ def nodes_to_keep(self) -> Input:
+ r"""Allows to connect nodes_to_keep input to the operator.
- Parameters
- ----------
- my_nodes_to_keep : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -170,12 +184,13 @@ def nodes_to_keep(self):
return self._nodes_to_keep
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -208,18 +223,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._abstract_meshed_region)
@property
- def abstract_meshed_region(self):
- """Allows to get abstract_meshed_region output of the operator
+ def abstract_meshed_region(self) -> Output:
+ r"""Allows to get abstract_meshed_region output of the operator
Returns
- ----------
- my_abstract_meshed_region : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.points_from_coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_abstract_meshed_region = op.outputs.abstract_meshed_region()
- """ # noqa: E501
+ """
return self._abstract_meshed_region
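A sketch for points_from_coordinates; the node IDs and the file path are purely illustrative:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file
mesh = model.metadata.meshed_region

# Keep a small, arbitrary set of nodes as point elements.
nodes_to_keep = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)

op = dpf.operators.mesh.points_from_coordinates(nodes_to_keep=nodes_to_keep, mesh=mesh)
point_mesh = op.outputs.abstract_meshed_region()  # MeshedRegion made of point elements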
diff --git a/src/ansys/dpf/core/operators/mesh/skin.py b/src/ansys/dpf/core/operators/mesh/skin.py
index de605b19e36..9e9abb95f19 100644
--- a/src/ansys/dpf/core/operators/mesh/skin.py
+++ b/src/ansys/dpf/core/operators/mesh/skin.py
@@ -4,58 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class skin(Operator):
- """Extracts a skin of the mesh in a new meshed region. The material ID of
+    r"""Extracts a skin of the mesh in a new meshed region. The material IDs of
initial elements are propagated to their facets.
+
Parameters
----------
- mesh : MeshedRegion
- mesh_scoping : Scoping, optional
- Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.
- duplicate_shell : bool, optional
- If input mesh contains shell elements, output
- mesh shell elements (boolean = 1) are
- duplicated, one per each orientation,
- or (boolean = 0) remain unchanged.
- add_beam : bool, optional
- If input mesh contains beam elements, output
- mesh beam elements (boolean = 1) are
- added or (boolean = 0) are ignored.
+ mesh: MeshedRegion
+ mesh_scoping: Scoping, optional
+ Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.
+ duplicate_shell: bool, optional
+ If input mesh contains shell elements, output mesh shell elements (boolean = 1) are duplicated, one per each orientation, or (boolean = 0) remain unchanged.
+ add_beam: bool, optional
+ If input mesh contains beam elements, output mesh beam elements (boolean = 1) are added or (boolean = 0) are ignored.
Returns
-------
- mesh : MeshedRegion
- Skin meshed region with facets and
- facets_to_ele property fields.
- nodes_mesh_scoping : Scoping
- map_new_elements_to_old :
- property_field_new_elements_to_old : PropertyField
- This property field provides, for each new
- face element id (in the scoping), the
- corresponding 3d volume element index
- (in the data) it has been extracted
- from. the 3d volume element id can be
- found with the element scoping of the
- input mesh.
- facet_indices : PropertyField
- This property field gives, for each new face
- element id (in the scoping), the
- corresponding face index on the
- source 3d volume element. the 3d
- volume element can be extracted from
- the previous output.
+ mesh: MeshedRegion
+ Skin meshed region with facets and facets_to_ele property fields.
+ nodes_mesh_scoping: Scoping
+ map_new_elements_to_old:
+ property_field_new_elements_to_old: PropertyField
+ This property field provides, for each new face element ID (in the scoping), the corresponding 3D volume element index (in the data) it has been extracted from. The 3D volume element ID can be found with the element scoping of the input mesh.
+ facet_indices: PropertyField
+ This property field gives, for each new face element ID (in the scoping), the corresponding face index on the source 3D volume element. The 3D volume element can be extracted from the previous output.
Examples
--------
@@ -112,9 +96,10 @@ def __init__(
self.inputs.add_beam.connect(add_beam)
@staticmethod
- def _spec():
- description = """Extracts a skin of the mesh in a new meshed region. The material ID of
- initial elements are propagated to their facets."""
+ def _spec() -> Specification:
+        description = r"""Extracts a skin of the mesh in a new meshed region. The material IDs of
+initial elements are propagated to their facets.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -122,34 +107,25 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.""",
+ document=r"""Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.""",
),
2: PinSpecification(
name="duplicate_shell",
type_names=["bool"],
optional=True,
- document="""If input mesh contains shell elements, output
- mesh shell elements (boolean = 1) are
- duplicated, one per each orientation,
- or (boolean = 0) remain unchanged.""",
+ document=r"""If input mesh contains shell elements, output mesh shell elements (boolean = 1) are duplicated, one per each orientation, or (boolean = 0) remain unchanged.""",
),
3: PinSpecification(
name="add_beam",
type_names=["bool"],
optional=True,
- document="""If input mesh contains beam elements, output
- mesh beam elements (boolean = 1) are
- added or (boolean = 0) are ignored.""",
+ document=r"""If input mesh contains beam elements, output mesh beam elements (boolean = 1) are added or (boolean = 0) are ignored.""",
),
},
map_output_pin_spec={
@@ -157,50 +133,38 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Skin meshed region with facets and
- facets_to_ele property fields.""",
+ document=r"""Skin meshed region with facets and facets_to_ele property fields.""",
),
1: PinSpecification(
name="nodes_mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="map_new_elements_to_old",
type_names=["umap"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="property_field_new_elements_to_old",
type_names=["property_field"],
optional=False,
- document="""This property field provides, for each new
- face element id (in the scoping), the
- corresponding 3d volume element index
- (in the data) it has been extracted
- from. the 3d volume element id can be
- found with the element scoping of the
- input mesh.""",
+ document=r"""This property field provides, for each new face element ID (in the scoping), the corresponding 3D volume element index (in the data) it has been extracted from. The 3D volume element ID can be found with the element scoping of the input mesh.""",
),
4: PinSpecification(
name="facet_indices",
type_names=["property_field"],
optional=False,
- document="""This property field gives, for each new face
- element id (in the scoping), the
- corresponding face index on the
- source 3d volume element. the 3d
- volume element can be extracted from
- the previous output.""",
+ document=r"""This property field gives, for each new face element ID (in the scoping), the corresponding face index on the source 3D volume element. The 3D volume element can be extracted from the previous output.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -209,29 +173,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="meshed_skin_sector", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSkin:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSkin
+ inputs:
+ An instance of InputsSkin.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSkin:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSkin
+ outputs:
+ An instance of OutputsSkin.
"""
return super().outputs
@@ -266,12 +237,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._add_beam)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -284,18 +256,15 @@ def mesh(self):
return self._mesh
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.
+ Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -308,17 +277,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def duplicate_shell(self):
- """Allows to connect duplicate_shell input to the operator.
+ def duplicate_shell(self) -> Input:
+ r"""Allows to connect duplicate_shell input to the operator.
- If input mesh contains shell elements, output
- mesh shell elements (boolean = 1) are
- duplicated, one per each orientation,
- or (boolean = 0) remain unchanged.
+ If input mesh contains shell elements, output mesh shell elements (boolean = 1) are duplicated, one per each orientation, or (boolean = 0) remain unchanged.
- Parameters
- ----------
- my_duplicate_shell : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -331,16 +298,15 @@ def duplicate_shell(self):
return self._duplicate_shell
@property
- def add_beam(self):
- """Allows to connect add_beam input to the operator.
+ def add_beam(self) -> Input:
+ r"""Allows to connect add_beam input to the operator.
- If input mesh contains beam elements, output
- mesh beam elements (boolean = 1) are
- added or (boolean = 0) are ignored.
+ If input mesh contains beam elements, output mesh beam elements (boolean = 1) are added or (boolean = 0) are ignored.
- Parameters
- ----------
- my_add_beam : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,86 +351,97 @@ def __init__(self, op: Operator):
self._outputs.append(self._facet_indices)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
+
+ Skin meshed region with facets and facets_to_ele property fields.
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
@property
- def nodes_mesh_scoping(self):
- """Allows to get nodes_mesh_scoping output of the operator
+ def nodes_mesh_scoping(self) -> Output:
+ r"""Allows to get nodes_mesh_scoping output of the operator
Returns
- ----------
- my_nodes_mesh_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping()
- """ # noqa: E501
+ """
return self._nodes_mesh_scoping
@property
- def map_new_elements_to_old(self):
- """Allows to get map_new_elements_to_old output of the operator
+ def map_new_elements_to_old(self) -> Output:
+ r"""Allows to get map_new_elements_to_old output of the operator
Returns
- ----------
- my_map_new_elements_to_old :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_map_new_elements_to_old = op.outputs.map_new_elements_to_old()
- """ # noqa: E501
+ """
return self._map_new_elements_to_old
@property
- def property_field_new_elements_to_old(self):
- """Allows to get property_field_new_elements_to_old output of the operator
+ def property_field_new_elements_to_old(self) -> Output:
+ r"""Allows to get property_field_new_elements_to_old output of the operator
+
+ This property field provides, for each new face element ID (in the scoping), the corresponding 3D volume element index (in the data) it has been extracted from. The 3D volume element ID can be found with the element scoping of the input mesh.
Returns
- ----------
- my_property_field_new_elements_to_old : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_property_field_new_elements_to_old = op.outputs.property_field_new_elements_to_old()
- """ # noqa: E501
+ """
return self._property_field_new_elements_to_old
@property
- def facet_indices(self):
- """Allows to get facet_indices output of the operator
+ def facet_indices(self) -> Output:
+ r"""Allows to get facet_indices output of the operator
+
+ This property field gives, for each new face element ID (in the scoping), the corresponding face index on the source 3D volume element. The 3D volume element can be extracted from the previous output.
Returns
- ----------
- my_facet_indices : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_facet_indices = op.outputs.facet_indices()
- """ # noqa: E501
+ """
return self._facet_indices
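The skin operator exposes the richest set of retyped outputs in this patch; a minimal sketch with a placeholder file path:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

op = dpf.operators.mesh.skin()
op.inputs.mesh.connect(model.metadata.meshed_region)
op.inputs.add_beam.connect(False)  # ignore beam elements when building the skin

skin_mesh = op.outputs.mesh()                                    # facets + facets_to_ele property fields
face_to_solid = op.outputs.property_field_new_elements_to_old()  # face element ID to source 3D element index
facet_index = op.outputs.facet_indices()                         # face element ID to face index on the source element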
diff --git a/src/ansys/dpf/core/operators/mesh/split_fields.py b/src/ansys/dpf/core/operators/mesh/split_fields.py
index 5f7146c9b12..357580c37a1 100644
--- a/src/ansys/dpf/core/operators/mesh/split_fields.py
+++ b/src/ansys/dpf/core/operators/mesh/split_fields.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class split_fields(Operator):
- """Split the input field or fields container based on the input mesh
+ r"""Split the input field or fields container based on the input mesh
regions
+
Parameters
----------
- field_or_fields_container : Field or FieldsContainer
- meshes : MeshesContainer
- Body meshes in the mesh controller cannot be
- mixed shell/solid
+ field_or_fields_container: Field or FieldsContainer
+ meshes: MeshesContainer
+ body meshes in the mesh controller cannot be mixed shell/solid
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -61,9 +65,10 @@ def __init__(
self.inputs.meshes.connect(meshes)
@staticmethod
- def _spec():
- description = """Split the input field or fields container based on the input mesh
- regions"""
+ def _spec() -> Specification:
+ description = r"""Split the input field or fields container based on the input mesh
+regions
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,14 +76,13 @@ def _spec():
name="field_or_fields_container",
type_names=["field", "fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="meshes",
type_names=["meshes_container"],
optional=False,
- document="""Body meshes in the mesh controller cannot be
- mixed shell/solid""",
+ document=r"""body meshes in the mesh controller cannot be mixed shell/solid""",
),
},
map_output_pin_spec={
@@ -86,14 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -102,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="split_fields", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSplitFields:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSplitFields
+ inputs:
+ An instance of InputsSplitFields.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSplitFields:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSplitFields
+ outputs:
+ An instance of OutputsSplitFields.
"""
return super().outputs
@@ -153,12 +164,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._meshes)
@property
- def field_or_fields_container(self):
- """Allows to connect field_or_fields_container input to the operator.
+ def field_or_fields_container(self) -> Input:
+ r"""Allows to connect field_or_fields_container input to the operator.
- Parameters
- ----------
- my_field_or_fields_container : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -171,15 +183,15 @@ def field_or_fields_container(self):
return self._field_or_fields_container
@property
- def meshes(self):
- """Allows to connect meshes input to the operator.
+ def meshes(self) -> Input:
+ r"""Allows to connect meshes input to the operator.
- Body meshes in the mesh controller cannot be
- mixed shell/solid
+ body meshes in the mesh controller cannot be mixed shell/solid
- Parameters
- ----------
- my_meshes : MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,18 +222,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.split_fields()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
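split_fields needs a MeshesContainer to drive the split, so this sketch chains it behind split_mesh (documented in the next file) and a displacement result. The file path is a placeholder and "mat" is assumed to be the material property label:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

# Split the mesh by material, then split the displacement field along the same bodies.
mesh_splitter = dpf.operators.mesh.split_mesh(mesh=model.metadata.meshed_region, property="mat")
disp = model.results.displacement()

op = dpf.operators.mesh.split_fields()
op.inputs.field_or_fields_container.connect(disp.outputs.fields_container)
op.inputs.meshes.connect(mesh_splitter.outputs.meshes)

per_body_fields = op.outputs.fields_container()  # one field per body mesh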
diff --git a/src/ansys/dpf/core/operators/mesh/split_mesh.py b/src/ansys/dpf/core/operators/mesh/split_mesh.py
index 9c739433ea2..67522ab90f9 100644
--- a/src/ansys/dpf/core/operators/mesh/split_mesh.py
+++ b/src/ansys/dpf/core/operators/mesh/split_mesh.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class split_mesh(Operator):
- """Split the input mesh into several meshes based on a given property
+ r"""Split the input mesh into several meshes based on a given property
(material property be default)
+
Parameters
----------
- mesh_scoping : Scoping, optional
+ mesh_scoping: Scoping, optional
Scoping
- mesh : MeshedRegion
- property : str
+ mesh: MeshedRegion
+ property: str
Returns
-------
- meshes : MeshesContainer
+ meshes: MeshesContainer
Examples
--------
@@ -66,9 +71,10 @@ def __init__(
self.inputs.property.connect(property)
@staticmethod
- def _spec():
- description = """Split the input mesh into several meshes based on a given property
- (material property be default)"""
+ def _spec() -> Specification:
+ description = r"""Split the input mesh into several meshes based on a given property
+(material property by default)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,19 +82,19 @@ def _spec():
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Scoping""",
+ document=r"""Scoping""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
13: PinSpecification(
name="property",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -96,14 +102,14 @@ def _spec():
name="meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -112,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="split_mesh", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsSplitMesh:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsSplitMesh
+ inputs:
+ An instance of InputsSplitMesh.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsSplitMesh:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsSplitMesh
+ outputs:
+ An instance of OutputsSplitMesh.
"""
return super().outputs
@@ -165,14 +178,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._property)
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
Scoping
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -185,12 +199,13 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -203,12 +218,13 @@ def mesh(self):
return self._mesh
@property
- def property(self):
- """Allows to connect property input to the operator.
+ def property(self) -> Input:
+ r"""Allows to connect property input to the operator.
- Parameters
- ----------
- my_property : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -239,18 +255,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._meshes)
@property
- def meshes(self):
- """Allows to get meshes output of the operator
+ def meshes(self) -> Output:
+ r"""Allows to get meshes output of the operator
Returns
- ----------
- my_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.split_mesh()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshes = op.outputs.meshes()
- """ # noqa: E501
+ """
return self._meshes
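split_mesh on its own, splitting by the material property; the file path is a placeholder, and indexing into the returned MeshesContainer is an assumption about the container API:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

op = dpf.operators.mesh.split_mesh()
op.inputs.mesh.connect(model.metadata.meshed_region)
op.inputs.property.connect("mat")  # material property, the documented default criterion

meshes = op.outputs.meshes()       # MeshesContainer, one MeshedRegion per property value
print(len(meshes), "body meshes")
first_body = meshes[0]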
diff --git a/src/ansys/dpf/core/operators/mesh/stl_export.py b/src/ansys/dpf/core/operators/mesh/stl_export.py
index 93d9f066c6f..81038cdc168 100644
--- a/src/ansys/dpf/core/operators/mesh/stl_export.py
+++ b/src/ansys/dpf/core/operators/mesh/stl_export.py
@@ -4,24 +4,29 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class stl_export(Operator):
- """export a mesh into a stl file.
+    r"""Export a mesh into an STL file.
+
Parameters
----------
- mesh : MeshedRegion
- file_path : str
+ mesh: MeshedRegion
+ file_path: str
Returns
-------
- data_sources : DataSources
+ data_sources: DataSources
Examples
--------
@@ -56,8 +61,9 @@ def __init__(self, mesh=None, file_path=None, config=None, server=None):
self.inputs.file_path.connect(file_path)
@staticmethod
- def _spec():
- description = """export a mesh into a stl file."""
+ def _spec() -> Specification:
+        description = r"""Export a mesh into an STL file.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,13 +71,13 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="file_path",
type_names=["string"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -79,14 +85,14 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -95,29 +101,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="stl_export", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsStlExport:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsStlExport
+ inputs:
+ An instance of InputsStlExport.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsStlExport:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsStlExport
+ outputs:
+ An instance of OutputsStlExport.
"""
return super().outputs
@@ -144,12 +157,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._file_path)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -162,12 +176,13 @@ def mesh(self):
return self._mesh
@property
- def file_path(self):
- """Allows to connect file_path input to the operator.
+ def file_path(self) -> Input:
+ r"""Allows to connect file_path input to the operator.
- Parameters
- ----------
- my_file_path : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,18 +213,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._data_sources)
@property
- def data_sources(self):
- """Allows to get data_sources output of the operator
+ def data_sources(self) -> Output:
+ r"""Allows to get data_sources output of the operator
Returns
- ----------
- my_data_sources : DataSources
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.stl_export()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_data_sources = op.outputs.data_sources()
- """ # noqa: E501
+ """
return self._data_sources
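stl_export writes the given mesh to disk and returns a DataSources pointing at the new file; both paths below are placeholders:

from ansys.dpf import core as dpf

model = dpf.Model(r"path/to/results.rst")  # placeholder result file

op = dpf.operators.mesh.stl_export()
op.inputs.mesh.connect(model.metadata.meshed_region)
op.inputs.file_path.connect(r"path/to/exported_mesh.stl")  # placeholder output path

ds = op.outputs.data_sources()  # DataSources referencing the written STL file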
diff --git a/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py b/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py
index b9edf5ce748..d96572f8c1f 100644
--- a/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py
+++ b/src/ansys/dpf/core/operators/mesh/tri_mesh_skin.py
@@ -4,35 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class tri_mesh_skin(Operator):
- """Extracts a skin of the mesh in triangles in a new meshed region.
+ r"""Extracts a skin of the mesh in triangles in a new meshed region.
+
Parameters
----------
- mesh : MeshedRegion
- include_surfaces : bool, optional
- True: meshing will also take into account
- shell and skin elements. false:
- meshing will ignore shell and skin
- elements. the default is false.
- mesh_scoping : Scoping, optional
- Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.
+ mesh: MeshedRegion
+ include_surfaces: bool, optional
+ True: meshing will also take into account shell and skin elements. False: meshing will ignore shell and skin elements. The default is false.
+ mesh_scoping: Scoping, optional
+ Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.
Returns
-------
- mesh : MeshedRegion
- nodes_mesh_scoping : Scoping
+ mesh: MeshedRegion
+ nodes_mesh_scoping: Scoping
Examples
--------
@@ -82,10 +80,9 @@ def __init__(
self.inputs.mesh_scoping.connect(mesh_scoping)
@staticmethod
- def _spec():
- description = (
- """Extracts a skin of the mesh in triangles in a new meshed region."""
- )
+ def _spec() -> Specification:
+ description = r"""Extracts a skin of the mesh in triangles in a new meshed region.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -93,26 +90,19 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="include_surfaces",
type_names=["bool"],
optional=True,
- document="""True: meshing will also take into account
- shell and skin elements. false:
- meshing will ignore shell and skin
- elements. the default is false.""",
+ document=r"""True: meshing will also take into account shell and skin elements. False: meshing will ignore shell and skin elements. The default is false.""",
),
2: PinSpecification(
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.""",
+ document=r"""Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.""",
),
},
map_output_pin_spec={
@@ -120,20 +110,20 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="nodes_mesh_scoping",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -142,31 +132,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="meshed_skin_sector_triangle", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTriMeshSkin:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTriMeshSkin
+ inputs:
+ An instance of InputsTriMeshSkin.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTriMeshSkin:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTriMeshSkin
+ outputs:
+ An instance of OutputsTriMeshSkin.
"""
return super().outputs
@@ -197,12 +194,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh_scoping)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,17 +213,15 @@ def mesh(self):
return self._mesh
@property
- def include_surfaces(self):
- """Allows to connect include_surfaces input to the operator.
+ def include_surfaces(self) -> Input:
+ r"""Allows to connect include_surfaces input to the operator.
- True: meshing will also take into account
- shell and skin elements. false:
- meshing will ignore shell and skin
- elements. the default is false.
+ True: meshing will also take into account shell and skin elements. False: meshing will ignore shell and skin elements. The default is false.
- Parameters
- ----------
- my_include_surfaces : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -238,18 +234,15 @@ def include_surfaces(self):
return self._include_surfaces
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Nodal scoping to restrict the skin extraction
- to a set of nodes. if provided, a
- skin element is added to the skin
- mesh if all its nodes are in the
- scoping.
+ Nodal scoping to restrict the skin extraction to a set of nodes. If provided, a skin element is added to the skin mesh if all its nodes are in the scoping.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,35 +276,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._nodes_mesh_scoping)
@property
- def mesh(self):
- """Allows to get mesh output of the operator
+ def mesh(self) -> Output:
+ r"""Allows to get mesh output of the operator
Returns
- ----------
- my_mesh : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.tri_mesh_skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh = op.outputs.mesh()
- """ # noqa: E501
+ """
return self._mesh
@property
- def nodes_mesh_scoping(self):
- """Allows to get nodes_mesh_scoping output of the operator
+ def nodes_mesh_scoping(self) -> Output:
+ r"""Allows to get nodes_mesh_scoping output of the operator
Returns
- ----------
- my_nodes_mesh_scoping : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.tri_mesh_skin()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nodes_mesh_scoping = op.outputs.nodes_mesh_scoping()
- """ # noqa: E501
+ """
return self._nodes_mesh_scoping
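To put the pin names above in context, a minimal usage sketch of the tri_mesh_skin operator following the connect/evaluate pattern of the docstring examples; my_mesh stands in for a MeshedRegion obtained elsewhere and is not defined here:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.tri_mesh_skin()
>>> op.inputs.mesh.connect(my_mesh)            # my_mesh: an existing MeshedRegion (assumed)
>>> op.inputs.include_surfaces.connect(True)   # optional pin 1: also take shell and skin elements into account
>>> skin_mesh = op.outputs.mesh()
>>> skin_scoping = op.outputs.nodes_mesh_scoping()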
diff --git a/src/ansys/dpf/core/operators/mesh/wireframe.py b/src/ansys/dpf/core/operators/mesh/wireframe.py
index f60f0d129d8..b03d2396388 100644
--- a/src/ansys/dpf/core/operators/mesh/wireframe.py
+++ b/src/ansys/dpf/core/operators/mesh/wireframe.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class wireframe(Operator):
- """Take a mesh and extracts its sharp edges, using pin 1 value as a
+ r"""Take a mesh and extracts its sharp edges, using pin 1 value as a
threshold angle.
+
Parameters
----------
- mesh : MeshedRegion
- threshold : float
- Angle threshold in radian that will trigger
- an edge detection.
+ mesh: MeshedRegion
+ threshold: float
+ angle threshold in radian that will trigger an edge detection.
Returns
-------
- wireframe : MeshedRegion
+ wireframe: MeshedRegion
Examples
--------
@@ -59,9 +63,10 @@ def __init__(self, mesh=None, threshold=None, config=None, server=None):
self.inputs.threshold.connect(threshold)
@staticmethod
- def _spec():
- description = """Take a mesh and extracts its sharp edges, using pin 1 value as a
- threshold angle."""
+ def _spec() -> Specification:
+ description = r"""Take a mesh and extracts its sharp edges, using pin 1 value as a
+threshold angle.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -69,14 +74,13 @@ def _spec():
name="mesh",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="threshold",
type_names=["double"],
optional=False,
- document="""Angle threshold in radian that will trigger
- an edge detection.""",
+ document=r"""angle threshold in radian that will trigger an edge detection.""",
),
},
map_output_pin_spec={
@@ -84,14 +88,14 @@ def _spec():
name="wireframe",
type_names=["abstract_meshed_region"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -100,29 +104,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="wireframe", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsWireframe:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsWireframe
+ inputs:
+ An instance of InputsWireframe.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsWireframe:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsWireframe
+ outputs:
+ An instance of OutputsWireframe.
"""
return super().outputs
@@ -149,12 +160,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._threshold)
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -167,15 +179,15 @@ def mesh(self):
return self._mesh
@property
- def threshold(self):
- """Allows to connect threshold input to the operator.
+ def threshold(self) -> Input:
+ r"""Allows to connect threshold input to the operator.
- Angle threshold in radian that will trigger
- an edge detection.
+ angle threshold in radian that will trigger an edge detection.
- Parameters
- ----------
- my_threshold : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -206,18 +218,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._wireframe)
@property
- def wireframe(self):
- """Allows to get wireframe output of the operator
+ def wireframe(self) -> Output:
+ r"""Allows to get wireframe output of the operator
Returns
- ----------
- my_wireframe : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.mesh.wireframe()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_wireframe = op.outputs.wireframe()
- """ # noqa: E501
+ """
return self._wireframe
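For illustration, a minimal sketch of driving the wireframe operator with a 30 degree threshold; my_mesh is an assumed, pre-existing MeshedRegion:

>>> from ansys.dpf import core as dpf
>>> import math
>>> op = dpf.operators.mesh.wireframe()
>>> op.inputs.mesh.connect(my_mesh)              # my_mesh: an existing MeshedRegion (assumed)
>>> op.inputs.threshold.connect(math.pi / 6.0)   # pin 1: angle threshold in radians
>>> edges = op.outputs.wireframe()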
diff --git a/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py b/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py
index c7e97d81b36..add2832aead 100644
--- a/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/boundary_condition_provider.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class boundary_condition_provider(Operator):
- """Reads boundary conditions from the results files contained in the
+ r"""Reads boundary conditions from the results files contained in the
streams or data sources.
+
Parameters
----------
- streams_container : StreamsContainer, optional
- data_sources : DataSources
+ streams_container: StreamsContainer, optional
+ data_sources: DataSources
Returns
-------
- results_info : Field or FieldsContainer
- Results info
+ results_info: Field or FieldsContainer
+ results info
Examples
--------
@@ -61,9 +66,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads boundary conditions from the results files contained in the
- streams or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads boundary conditions from the results files contained in the
+streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -71,13 +77,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -85,14 +91,14 @@ def _spec():
name="results_info",
type_names=["field", "fields_container"],
optional=False,
- document="""Results info""",
+ document=r"""results info""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +107,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="boundary_conditions", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBoundaryConditionProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBoundaryConditionProvider
+ inputs:
+ An instance of InputsBoundaryConditionProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBoundaryConditionProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBoundaryConditionProvider
+ outputs:
+ An instance of OutputsBoundaryConditionProvider.
"""
return super().outputs
@@ -154,12 +167,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -172,12 +186,13 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
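As a sketch of how this provider is typically fed, assuming a result file path (placeholder) wrapped in a DataSources:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder path to a result file
>>> op = dpf.operators.metadata.boundary_condition_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> results_info = op.outputs.results_info()  # Field or FieldsContainer, per the spec above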
diff --git a/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py b/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py
index 246db091dd8..9cb1848dc9f 100644
--- a/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/coordinate_system_data_provider.py
@@ -4,34 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class coordinate_system_data_provider(Operator):
- """Reads coordinate systems data from the result files contained in the
+ r"""Reads coordinate systems data from the result files contained in the
streams or data sources.
+
Parameters
----------
- solver_coordinate_system_ids : int, optional
- Coorfinate system ids to recover used by the
- solver. if not set, all available
- materials to be recovered.
- streams : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data.
- data_sources : DataSources
- Result file path container, used if no
- streams are set.
+ solver_coordinate_system_ids: int, optional
+        Coordinate System ids to recover used by the solver. If not set, all available materials to be recovered.
+ streams: StreamsContainer, optional
+ Result file container allowed to be kept open to cache data.
+ data_sources: DataSources
+ Result file path container, used if no streams are set.
Returns
-------
- coordinate_system_data1 : GenericDataContainer
- coordinate_system_data2 : GenericDataContainer
+ coordinate_system_data1: GenericDataContainer
+ coordinate_system_data2: GenericDataContainer
Examples
--------
@@ -83,9 +84,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads coordinate systems data from the result files contained in the
- streams or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads coordinate systems data from the result files contained in the
+streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -93,23 +95,19 @@ def _spec():
name="solver_coordinate_system_ids",
type_names=["int32", "vector"],
optional=True,
- document="""Coorfinate system ids to recover used by the
- solver. if not set, all available
- materials to be recovered.""",
+            document=r"""Coordinate System ids to recover used by the solver. If not set, all available materials to be recovered.""",
),
3: PinSpecification(
name="streams",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data.""",
+ document=r"""Result file container allowed to be kept open to cache data.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set.""",
+ document=r"""Result file path container, used if no streams are set.""",
),
},
map_output_pin_spec={
@@ -117,20 +115,20 @@ def _spec():
name="coordinate_system_data1",
type_names=["generic_data_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="coordinate_system_data2",
type_names=["generic_data_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -139,31 +137,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="coordinate_systems_data_provider", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsCoordinateSystemDataProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCoordinateSystemDataProvider
+ inputs:
+ An instance of InputsCoordinateSystemDataProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCoordinateSystemDataProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCoordinateSystemDataProvider
+ outputs:
+ An instance of OutputsCoordinateSystemDataProvider.
"""
return super().outputs
@@ -200,16 +205,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def solver_coordinate_system_ids(self):
- """Allows to connect solver_coordinate_system_ids input to the operator.
+ def solver_coordinate_system_ids(self) -> Input:
+ r"""Allows to connect solver_coordinate_system_ids input to the operator.
- Coorfinate system ids to recover used by the
- solver. if not set, all available
- materials to be recovered.
+        Coordinate System ids to recover used by the solver. If not set, all available materials to be recovered.
- Parameters
- ----------
- my_solver_coordinate_system_ids : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,15 +226,15 @@ def solver_coordinate_system_ids(self):
return self._solver_coordinate_system_ids
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
- Result file container allowed to be kept open
- to cache data.
+ Result file container allowed to be kept open to cache data.
- Parameters
- ----------
- my_streams : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -243,15 +247,15 @@ def streams(self):
return self._streams
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set.
+ Result file path container, used if no streams are set.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -289,35 +293,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._coordinate_system_data2)
@property
- def coordinate_system_data1(self):
- """Allows to get coordinate_system_data1 output of the operator
+ def coordinate_system_data1(self) -> Output:
+ r"""Allows to get coordinate_system_data1 output of the operator
Returns
- ----------
- my_coordinate_system_data1 : GenericDataContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.coordinate_system_data_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_coordinate_system_data1 = op.outputs.coordinate_system_data1()
- """ # noqa: E501
+ """
return self._coordinate_system_data1
@property
- def coordinate_system_data2(self):
- """Allows to get coordinate_system_data2 output of the operator
+ def coordinate_system_data2(self) -> Output:
+ r"""Allows to get coordinate_system_data2 output of the operator
Returns
- ----------
- my_coordinate_system_data2 : GenericDataContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.coordinate_system_data_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_coordinate_system_data2 = op.outputs.coordinate_system_data2()
- """ # noqa: E501
+ """
return self._coordinate_system_data2
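A minimal sketch of querying coordinate system data from a result file; the file path is a placeholder, and pin 0 is left unset so all available coordinate systems are recovered:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder path to a result file
>>> op = dpf.operators.metadata.coordinate_system_data_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> cs_data = op.outputs.coordinate_system_data1()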
diff --git a/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py b/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py
index 32b5419e4dc..ec758b32d40 100644
--- a/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py
+++ b/src/ansys/dpf/core/operators/metadata/cyclic_mesh_expansion.py
@@ -4,33 +4,34 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_mesh_expansion(Operator):
- """Expand the mesh.
+ r"""Expand the mesh.
+
Parameters
----------
- sector_meshed_region : MeshedRegion or MeshesContainer, optional
- cyclic_support : CyclicSupport
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sector_meshed_region: MeshedRegion or MeshesContainer, optional
+ cyclic_support: CyclicSupport
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
Returns
-------
- meshed_region : MeshedRegion
- Expanded meshed region.
- cyclic_support : CyclicSupport
- Input cyclic support modified in place
- containing the new expanded meshed
- regions.
+ meshed_region: MeshedRegion
+ expanded meshed region.
+ cyclic_support: CyclicSupport
+ input cyclic support modified in place containing the new expanded meshed regions.
Examples
--------
@@ -78,8 +79,9 @@ def __init__(
self.inputs.sectors_to_expand.connect(sectors_to_expand)
@staticmethod
- def _spec():
- description = """Expand the mesh."""
+ def _spec() -> Specification:
+ description = r"""Expand the mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -87,21 +89,19 @@ def _spec():
name="sector_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
},
map_output_pin_spec={
@@ -109,22 +109,20 @@ def _spec():
name="meshed_region",
type_names=["abstract_meshed_region"],
optional=False,
- document="""Expanded meshed region.""",
+ document=r"""expanded meshed region.""",
),
1: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""Input cyclic support modified in place
- containing the new expanded meshed
- regions.""",
+ document=r"""input cyclic support modified in place containing the new expanded meshed regions.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -133,29 +131,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cyclic_expansion_mesh", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicMeshExpansion:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicMeshExpansion
+ inputs:
+ An instance of InputsCyclicMeshExpansion.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicMeshExpansion:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicMeshExpansion
+ outputs:
+ An instance of OutputsCyclicMeshExpansion.
"""
return super().outputs
@@ -192,12 +197,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._sectors_to_expand)
@property
- def sector_meshed_region(self):
- """Allows to connect sector_meshed_region input to the operator.
+ def sector_meshed_region(self) -> Input:
+ r"""Allows to connect sector_meshed_region input to the operator.
- Parameters
- ----------
- my_sector_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -210,12 +216,13 @@ def sector_meshed_region(self):
return self._sector_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,16 +235,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -273,35 +279,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._cyclic_support)
@property
- def meshed_region(self):
- """Allows to get meshed_region output of the operator
+ def meshed_region(self) -> Output:
+ r"""Allows to get meshed_region output of the operator
+
+ expanded meshed region.
Returns
- ----------
- my_meshed_region : MeshedRegion
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.cyclic_mesh_expansion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_meshed_region = op.outputs.meshed_region()
- """ # noqa: E501
+ """
return self._meshed_region
@property
- def cyclic_support(self):
- """Allows to get cyclic_support output of the operator
+ def cyclic_support(self) -> Output:
+ r"""Allows to get cyclic_support output of the operator
+
+ input cyclic support modified in place containing the new expanded meshed regions.
Returns
- ----------
- my_cyclic_support : CyclicSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.cyclic_mesh_expansion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_cyclic_support = op.outputs.cyclic_support()
- """ # noqa: E501
+ """
return self._cyclic_support
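A minimal sketch of expanding a sector mesh; my_support stands in for a CyclicSupport produced elsewhere (for example by the cyclic support provider below) and the sector list is illustrative:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.cyclic_mesh_expansion()
>>> op.inputs.cyclic_support.connect(my_support)    # my_support: an existing CyclicSupport (assumed)
>>> op.inputs.sectors_to_expand.connect([0, 1, 2])  # optional pin 18: expand the first three sectors
>>> expanded = op.outputs.meshed_region()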
diff --git a/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py b/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py
index dff88938947..fbfac8a4b70 100644
--- a/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/cyclic_support_provider.py
@@ -4,37 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_support_provider(Operator):
- """Read the cyclic support (DPF entity containing necessary information
- for expansions) and expands the mesh.
+ r"""Read the cyclic support (DPF entity containing necessary information for
+ expansions) and expands the mesh.
+
Parameters
----------
- streams_container : StreamsContainer, optional
+ streams_container: StreamsContainer, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- sector_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh of the first sector.
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- If this pin is set, expanding the mesh is not
- necessary.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ data_sources: DataSources
+ data sources containing the result file.
+ sector_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh of the first sector.
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ if this pin is set, expanding the mesh is not necessary.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
Returns
-------
- cyclic_support : CyclicSupport
- sector_meshes : MeshesContainer
+ cyclic_support: CyclicSupport
+ sector_meshes: MeshesContainer
Examples
--------
@@ -96,9 +98,10 @@ def __init__(
self.inputs.sectors_to_expand.connect(sectors_to_expand)
@staticmethod
- def _spec():
- description = """Read the cyclic support (DPF entity containing necessary information
- for expansions) and expands the mesh."""
+ def _spec() -> Specification:
+ description = r"""Read the cyclic support (DPF entity containing necessary information for
+expansions) and expands the mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -106,34 +109,31 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
7: PinSpecification(
name="sector_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the first sector.""",
+ document=r"""mesh of the first sector.""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""If this pin is set, expanding the mesh is not
- necessary.""",
+ document=r"""if this pin is set, expanding the mesh is not necessary.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["scoping", "scopings_container", "vector"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
},
map_output_pin_spec={
@@ -141,20 +141,20 @@ def _spec():
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="sector_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -163,31 +163,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="mapdl::rst::support_provider_cyclic", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicSupportProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicSupportProvider
+ inputs:
+ An instance of InputsCyclicSupportProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicSupportProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicSupportProvider
+ outputs:
+ An instance of OutputsCyclicSupportProvider.
"""
return super().outputs
@@ -236,14 +243,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._sectors_to_expand)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -256,14 +264,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -276,14 +285,15 @@ def data_sources(self):
return self._data_sources
@property
- def sector_meshed_region(self):
- """Allows to connect sector_meshed_region input to the operator.
+ def sector_meshed_region(self) -> Input:
+ r"""Allows to connect sector_meshed_region input to the operator.
- Mesh of the first sector.
+ mesh of the first sector.
- Parameters
- ----------
- my_sector_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +306,15 @@ def sector_meshed_region(self):
return self._sector_meshed_region
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- If this pin is set, expanding the mesh is not
- necessary.
+ if this pin is set, expanding the mesh is not necessary.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,16 +327,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -364,35 +373,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._sector_meshes)
@property
- def cyclic_support(self):
- """Allows to get cyclic_support output of the operator
+ def cyclic_support(self) -> Output:
+ r"""Allows to get cyclic_support output of the operator
Returns
- ----------
- my_cyclic_support : CyclicSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.cyclic_support_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_cyclic_support = op.outputs.cyclic_support()
- """ # noqa: E501
+ """
return self._cyclic_support
@property
- def sector_meshes(self):
- """Allows to get sector_meshes output of the operator
+ def sector_meshes(self) -> Output:
+ r"""Allows to get sector_meshes output of the operator
Returns
- ----------
- my_sector_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.cyclic_support_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_sector_meshes = op.outputs.sector_meshes()
- """ # noqa: E501
+ """
return self._sector_meshes
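A minimal sketch of reading the cyclic support from a result file; the path is a placeholder and the optional mesh pins are left unset:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder path to a result file
>>> op = dpf.operators.metadata.cyclic_support_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> support = op.outputs.cyclic_support()
>>> sector_meshes = op.outputs.sector_meshes()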
diff --git a/src/ansys/dpf/core/operators/metadata/datasources_provider.py b/src/ansys/dpf/core/operators/metadata/datasources_provider.py
index b31bc817a13..b907a410e1a 100644
--- a/src/ansys/dpf/core/operators/metadata/datasources_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/datasources_provider.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class datasources_provider(Operator):
- """Creates a DataSources by expanding another.
+ r"""Creates a DataSources by expanding another.
+
Parameters
----------
- data_sources : DataSources
+ data_sources: DataSources
Returns
-------
- data_sources : DataSources
+ data_sources: DataSources
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, data_sources=None, config=None, server=None):
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Creates a DataSources by expanding another."""
+ def _spec() -> Specification:
+ description = r"""Creates a DataSources by expanding another.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="datasources_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDatasourcesProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDatasourcesProvider
+ inputs:
+ An instance of InputsDatasourcesProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDatasourcesProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDatasourcesProvider
+ outputs:
+ An instance of OutputsDatasourcesProvider.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._data_sources)
@property
- def data_sources(self):
- """Allows to get data_sources output of the operator
+ def data_sources(self) -> Output:
+ r"""Allows to get data_sources output of the operator
Returns
- ----------
- my_data_sources : DataSources
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.datasources_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_data_sources = op.outputs.data_sources()
- """ # noqa: E501
+ """
return self._data_sources
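A minimal sketch of expanding a DataSources through this provider; the input path is a placeholder:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder path to a result file
>>> op = dpf.operators.metadata.datasources_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> expanded_sources = op.outputs.data_sources()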
diff --git a/src/ansys/dpf/core/operators/metadata/element_types_provider.py b/src/ansys/dpf/core/operators/metadata/element_types_provider.py
index fd359a72795..9c85d5abf75 100644
--- a/src/ansys/dpf/core/operators/metadata/element_types_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/element_types_provider.py
@@ -4,47 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_types_provider(Operator):
- """Reads element types data from the result files contained in the
- streams or data sources.
+ r"""Reads element types data from the result files contained in the streams
+ or data sources.
+
Parameters
----------
- solver_element_types_ids : int, optional
- Element type ids to recover used by the
- solver. if not set, all available
- element types to be recovered.
- streams : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data.
- data_sources : DataSources
- Result file path container, used if no
- streams are set.
+ solver_element_types_ids: int, optional
+ Element Type ids to recover used by the solver. If not set, all available element types to be recovered.
+ streams: StreamsContainer, optional
+ Result file container allowed to be kept open to cache data.
+ data_sources: DataSources
+ Result file path container, used if no streams are set.
Returns
-------
- element_types_data : GenericDataContainer
- The generic_data_container has a class_name:
- elementtypesproperties. it contains
- the following property fields:
- element_routine_number: element
- routine number. e.g 186 for solid186,
- keyopts: element type option keys,
- kdofs: dof/node for this element
- type.this is a bit mapping, nodelm:
- number of nodes for this element
- type, nodfor: number of nodes per
- element having nodal forces, nodstr:
- number of nodes per element having
- nodal stresses, new_gen_element:
- element of new generation.
+ element_types_data: GenericDataContainer
+        The generic_data_container has a class_name: ElementTypesProperties. It contains the following property fields: element_routine_number: Element routine number. E.g. 186 for SOLID186, keyopts: Element type option keys, kdofs: DOF/node for this element type. This is a bit mapping, nodelm: Number of nodes for this element type, nodfor: Number of nodes per element having nodal forces, nodstr: Number of nodes per element having nodal stresses, new_gen_element: Element of new generation.
Examples
--------
@@ -91,9 +79,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads element types data from the result files contained in the
- streams or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads element types data from the result files contained in the streams
+or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -101,23 +90,19 @@ def _spec():
name="solver_element_types_ids",
type_names=["int32", "vector"],
optional=True,
- document="""Element type ids to recover used by the
- solver. if not set, all available
- element types to be recovered.""",
+ document=r"""Element Type ids to recover used by the solver. If not set, all available element types to be recovered.""",
),
3: PinSpecification(
name="streams",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data.""",
+ document=r"""Result file container allowed to be kept open to cache data.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set.""",
+ document=r"""Result file path container, used if no streams are set.""",
),
},
map_output_pin_spec={
@@ -125,27 +110,14 @@ def _spec():
name="element_types_data",
type_names=["generic_data_container"],
optional=False,
- document="""The generic_data_container has a class_name:
- elementtypesproperties. it contains
- the following property fields:
- element_routine_number: element
- routine number. e.g 186 for solid186,
- keyopts: element type option keys,
- kdofs: dof/node for this element
- type.this is a bit mapping, nodelm:
- number of nodes for this element
- type, nodfor: number of nodes per
- element having nodal forces, nodstr:
- number of nodes per element having
- nodal stresses, new_gen_element:
- element of new generation.""",
+            document=r"""The generic_data_container has a class_name: ElementTypesProperties. It contains the following property fields: element_routine_number: Element routine number. E.g. 186 for SOLID186, keyopts: Element type option keys, kdofs: DOF/node for this element type. This is a bit mapping, nodelm: Number of nodes for this element type, nodfor: Number of nodes per element having nodal forces, nodstr: Number of nodes per element having nodal stresses, new_gen_element: Element of new generation.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -154,29 +126,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="element_types_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementTypesProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementTypesProvider
+ inputs:
+ An instance of InputsElementTypesProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementTypesProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementTypesProvider
+ outputs:
+ An instance of OutputsElementTypesProvider.
"""
return super().outputs
@@ -211,16 +190,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def solver_element_types_ids(self):
- """Allows to connect solver_element_types_ids input to the operator.
+ def solver_element_types_ids(self) -> Input:
+ r"""Allows to connect solver_element_types_ids input to the operator.
- Element type ids to recover used by the
- solver. if not set, all available
- element types to be recovered.
+ Element Type ids to recover used by the solver. If not set, all available element types to be recovered.
- Parameters
- ----------
- my_solver_element_types_ids : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,15 +211,15 @@ def solver_element_types_ids(self):
return self._solver_element_types_ids
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
- Result file container allowed to be kept open
- to cache data.
+ Result file container allowed to be kept open to cache data.
- Parameters
- ----------
- my_streams : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -254,15 +232,15 @@ def streams(self):
return self._streams
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set.
+ Result file path container, used if no streams are set.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -295,18 +273,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._element_types_data)
@property
- def element_types_data(self):
- """Allows to get element_types_data output of the operator
+ def element_types_data(self) -> Output:
+ r"""Allows to get element_types_data output of the operator
+
+        The generic_data_container has a class_name: ElementTypesProperties. It contains the following property fields: element_routine_number: Element routine number. E.g. 186 for SOLID186, keyopts: Element type option keys, kdofs: DOF/node for this element type. This is a bit mapping, nodelm: Number of nodes for this element type, nodfor: Number of nodes per element having nodal forces, nodstr: Number of nodes per element having nodal stresses, new_gen_element: Element of new generation.
Returns
- ----------
- my_element_types_data : GenericDataContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.element_types_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_element_types_data = op.outputs.element_types_data()
- """ # noqa: E501
+ """
return self._element_types_data
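A minimal sketch of recovering the element type properties described above; the result file path is a placeholder and pin 0 is left unset so all element types are returned:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder path to a result file
>>> op = dpf.operators.metadata.element_types_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> types_data = op.outputs.element_types_data()  # GenericDataContainer with the property fields listed above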
diff --git a/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py b/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py
index 468e69726d6..021eab1a30d 100644
--- a/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py
+++ b/src/ansys/dpf/core/operators/metadata/integrate_over_time_freq.py
@@ -4,30 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class integrate_over_time_freq(Operator):
- """Integration of an input field over timefreq.
+ r"""Integration of an input field over timefreq.
+
Parameters
----------
- field : Field
- scoping : Scoping, optional
- Integrate the input field over a specific
- scoping.
- time_freq_support : TimeFreqSupport, optional
- Time freq to integrate on, otherwise time
- freq support from the input field is
- taken.
+ field: Field
+ scoping: Scoping, optional
+ Integrate the input field over a specific scoping.
+ time_freq_support: TimeFreqSupport, optional
+ Time Freq to integrate on, otherwise time freq support from the input field is taken.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -69,8 +71,9 @@ def __init__(
self.inputs.time_freq_support.connect(time_freq_support)
@staticmethod
- def _spec():
- description = """Integration of an input field over timefreq."""
+ def _spec() -> Specification:
+ description = r"""Integration of an input field over timefreq.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,22 +81,19 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""Integrate the input field over a specific
- scoping.""",
+ document=r"""Integrate the input field over a specific scoping.""",
),
2: PinSpecification(
name="time_freq_support",
type_names=["time_freq_support"],
optional=True,
- document="""Time freq to integrate on, otherwise time
- freq support from the input field is
- taken.""",
+ document=r"""Time Freq to integrate on, otherwise time freq support from the input field is taken.""",
),
},
map_output_pin_spec={
@@ -101,14 +101,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -117,29 +117,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="timefreq::integrate", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIntegrateOverTimeFreq:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIntegrateOverTimeFreq
+ inputs:
+ An instance of InputsIntegrateOverTimeFreq.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIntegrateOverTimeFreq:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIntegrateOverTimeFreq
+ outputs:
+ An instance of OutputsIntegrateOverTimeFreq.
"""
return super().outputs
@@ -172,12 +179,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._time_freq_support)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -190,15 +198,15 @@ def field(self):
return self._field
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- Integrate the input field over a specific
- scoping.
+ Integrate the input field over a specific scoping.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,16 +219,15 @@ def scoping(self):
return self._scoping
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Time freq to integrate on, otherwise time
- freq support from the input field is
- taken.
+ Time Freq to integrate on, otherwise time freq support from the input field is taken.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +258,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.integrate_over_time_freq()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
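As a quick usage sketch for integrate_over_time_freq, following the connect/evaluate pattern the generated examples use (my_field and my_scoping are placeholders for a Field and a Scoping you already hold):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.integrate_over_time_freq()
>>> op.inputs.field.connect(my_field)      # field to integrate over time/freq
>>> op.inputs.scoping.connect(my_scoping)  # optional: restrict the integration to a scoping
>>> integrated = op.outputs.field()        # evaluates the operator and returns the integrated field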
diff --git a/src/ansys/dpf/core/operators/metadata/is_cyclic.py b/src/ansys/dpf/core/operators/metadata/is_cyclic.py
index 2736b79c411..cd59ae9aa6b 100644
--- a/src/ansys/dpf/core/operators/metadata/is_cyclic.py
+++ b/src/ansys/dpf/core/operators/metadata/is_cyclic.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class is_cyclic(Operator):
- """Reads if the model is cyclic from the result file.
+ r"""Reads if the model is cyclic from the result file.
+
Parameters
----------
- streams_container : StreamsContainer, optional
- Streams (result file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ streams_container: StreamsContainer, optional
+ streams (result file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- file_path : str
- Returns 'single_stage' or 'multi_stage' or an
- empty string for non cyclic model
+ file_path: str
+ returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model
Examples
--------
@@ -63,8 +66,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads if the model is cyclic from the result file."""
+ def _spec() -> Specification:
+ description = r"""Reads if the model is cyclic from the result file.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -72,14 +76,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional)""",
+ document=r"""streams (result file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -87,15 +90,14 @@ def _spec():
name="file_path",
type_names=["string"],
optional=False,
- document="""Returns 'single_stage' or 'multi_stage' or an
- empty string for non cyclic model""",
+ document=r"""returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -104,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="is_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsIsCyclic:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsIsCyclic
+ inputs:
+ An instance of InputsIsCyclic.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsIsCyclic:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsIsCyclic
+ outputs:
+ An instance of OutputsIsCyclic.
"""
return super().outputs
@@ -153,14 +162,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (result file container) (optional)
+ streams (result file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,15 +183,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,18 +222,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._file_path)
@property
- def file_path(self):
- """Allows to get file_path output of the operator
+ def file_path(self) -> Output:
+ r"""Allows to get file_path output of the operator
+
+ returns 'single_stage' or 'multi_stage' or an empty string for non cyclic model
Returns
- ----------
- my_file_path : str
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.is_cyclic()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_file_path = op.outputs.file_path()
- """ # noqa: E501
+ """
return self._file_path
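A minimal sketch for is_cyclic, assuming a result file at a hypothetical path:
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.is_cyclic(data_sources=ds)
>>> kind = op.outputs.file_path()
>>> # kind is 'single_stage', 'multi_stage', or '' for a non-cyclic model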
diff --git a/src/ansys/dpf/core/operators/metadata/material_support_provider.py b/src/ansys/dpf/core/operators/metadata/material_support_provider.py
index 05d5f7204f8..39faeeef08d 100644
--- a/src/ansys/dpf/core/operators/metadata/material_support_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/material_support_provider.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class material_support_provider(Operator):
- """Reads the material support.
+ r"""Reads the material support.
+
Parameters
----------
- streams_container : StreamsContainer, optional
+ streams_container: StreamsContainer, optional
Streams result file container (optional).
- data_sources : DataSources
- If the stream is null, get the file path from
- the data sources.
+ data_sources: DataSources
+ if the stream is null, get the file path from the data sources.
Returns
-------
- abstract_field_support : AbstractFieldSupport
+ abstract_field_support: AbstractFieldSupport
Examples
--------
@@ -61,8 +65,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the material support."""
+ def _spec() -> Specification:
+ description = r"""Reads the material support.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +75,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams result file container (optional).""",
+ document=r"""Streams result file container (optional).""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, get the file path from
- the data sources.""",
+ document=r"""if the stream is null, get the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -85,14 +89,14 @@ def _spec():
name="abstract_field_support",
type_names=["abstract_field_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mat_support_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMaterialSupportProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMaterialSupportProvider
+ inputs:
+ An instance of InputsMaterialSupportProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMaterialSupportProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMaterialSupportProvider
+ outputs:
+ An instance of OutputsMaterialSupportProvider.
"""
return super().outputs
@@ -154,14 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams result file container (optional).
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -174,15 +186,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, get the file path from
- the data sources.
+ if the stream is null, get the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._abstract_field_support)
@property
- def abstract_field_support(self):
- """Allows to get abstract_field_support output of the operator
+ def abstract_field_support(self) -> Output:
+ r"""Allows to get abstract_field_support output of the operator
Returns
- ----------
- my_abstract_field_support : AbstractFieldSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.material_support_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_abstract_field_support = op.outputs.abstract_field_support()
- """ # noqa: E501
+ """
return self._abstract_field_support
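A similar sketch for material_support_provider (same hypothetical file path):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.material_support_provider(data_sources=ds)
>>> mat_support = op.outputs.abstract_field_support()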
diff --git a/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py
index a9538c93cec..4fcdf3ab9dc 100644
--- a/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/mesh_info_provider.py
@@ -4,32 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_info_provider(Operator):
- """Reads the mesh information, such as number of elements (common
- property), number of faces (Cff plugins) or scoping of parts
- (LSDYNA plugins) on files contained in the streams or data
- sources.
+ r"""Reads the mesh information, such as number of elements (common
+ property), number of faces (Cff plugins) or scoping of parts (LSDYNA
+ plugins) on files contained in the streams or data sources.
+
Parameters
----------
- time_scoping : int, optional
- Optional time/frequency set id of the mesh.
- streams_container : StreamsContainer, optional
- Streams (mesh file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ time_scoping: int, optional
+ Optional time/frequency set ID of the mesh.
+ streams_container: StreamsContainer, optional
+ streams (mesh file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- mesh_info : GenericDataContainer
+ mesh_info: GenericDataContainer
Examples
--------
@@ -76,11 +79,11 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the mesh information, such as number of elements (common
- property), number of faces (Cff plugins) or scoping of
- parts (LSDYNA plugins) on files contained in the streams
- or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads the mesh information, such as number of elements (common
+property), number of faces (Cff plugins) or scoping of parts (LSDYNA
+plugins) on files contained in the streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -88,20 +91,19 @@ def _spec():
name="time_scoping",
type_names=["int32"],
optional=True,
- document="""Optional time/frequency set id of the mesh.""",
+ document=r"""Optional time/frequency set ID of the mesh.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (mesh file container) (optional)""",
+ document=r"""streams (mesh file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -109,7 +111,7 @@ def _spec():
name="mesh_info",
type_names=["generic_data_container"],
optional=False,
- document="""""",
+ document=r"""""",
name_derived_class=["mesh_info"],
),
},
@@ -117,7 +119,7 @@ def _spec():
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -126,29 +128,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_info_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshInfoProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshInfoProvider
+ inputs:
+ An instance of InputsMeshInfoProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshInfoProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshInfoProvider
+ outputs:
+ An instance of OutputsMeshInfoProvider.
"""
return super().outputs
@@ -181,14 +190,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Optional time/frequency set id of the mesh.
+ Optional time/frequency set ID of the mesh.
- Parameters
- ----------
- my_time_scoping : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -201,14 +211,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (mesh file container) (optional)
+ streams (mesh file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,15 +232,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -260,18 +271,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh_info)
@property
- def mesh_info(self):
- """Allows to get mesh_info output of the operator
+ def mesh_info(self) -> Output:
+ r"""Allows to get mesh_info output of the operator
Returns
- ----------
- my_mesh_info : mesh_info
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.mesh_info_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh_info = op.outputs.mesh_info()
- """ # noqa: E501
+ """
return self._mesh_info
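For mesh_info_provider, a sketch that also exercises the optional time_scoping pin (the path and the set ID are placeholders):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.mesh_info_provider(data_sources=ds)
>>> op.inputs.time_scoping.connect(1)          # optional: time/frequency set ID
>>> mesh_info = op.outputs.mesh_info()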
diff --git a/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py
index b8abe3a7c19..1d3732c52fd 100644
--- a/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/mesh_property_provider.py
@@ -4,52 +4,41 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_property_provider(Operator):
- """Reads a property related to the mesh, defined by its name, by calling
- the readers defined by the data sources. These properties can be
- used to fill in the mesh.
+ r"""Reads a property related to the mesh, defined by its name, by calling
+ the readers defined by the data sources. These properties can be used to
+ fill in the mesh.
+
Parameters
----------
- mesh_scoping : Scoping, optional
- Retrieves a property field on a subset of
- elements or nodes.
- streams_container : StreamsContainer, optional
- Streams (result file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
- property_name : str
- Supported property names are: "mat",
- "named_selection",
- "named_selection_names",
- "apdl_element_type", "section",
- "elprops", "keyopt_1" to "keyopt_18".
- property_identifier : int or str, optional
- Retrieves a property at a given index or by
- name. for example, a named
- selection's number or a named
- selection's name.
+ mesh_scoping: Scoping, optional
+ Retrieves a property field on a subset of elements or nodes.
+ streams_container: StreamsContainer, optional
+ streams (result file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
+ property_name: str
+ Supported property names are: "mat", "named_selection", "named_selection_names", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18".
+ property_identifier: int or str, optional
+ Retrieves a property at a given index or by name. For example, a named selection's number or a named selection's name.
Returns
-------
- property : Scoping or PropertyField or StringField
- Returns a property field for properties:
- "mat", "apdl_element_type",
- "section", "elprops", "keyopt_1" to
- "keyopt_18" (or any mesh's property
- field), a scoping for
- properties:"named_selection", a
- string field for properties:
- "named_selection_names".
+ property: Scoping or PropertyField or StringField
+ Returns a property field for properties: "mat", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18" (or any mesh's property field), a scoping for properties:"named_selection", a string field for properties: "named_selection_names".
Examples
--------
@@ -108,10 +97,11 @@ def __init__(
self.inputs.property_identifier.connect(property_identifier)
@staticmethod
- def _spec():
- description = """Reads a property related to the mesh, defined by its name, by calling
- the readers defined by the data sources. These properties
- can be used to fill in the mesh."""
+ def _spec() -> Specification:
+ description = r"""Reads a property related to the mesh, defined by its name, by calling
+the readers defined by the data sources. These properties can be used to
+fill in the mesh.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -119,40 +109,31 @@ def _spec():
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Retrieves a property field on a subset of
- elements or nodes.""",
+ document=r"""Retrieves a property field on a subset of elements or nodes.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional)""",
+ document=r"""streams (result file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
13: PinSpecification(
name="property_name",
type_names=["string"],
optional=False,
- document="""Supported property names are: "mat",
- "named_selection",
- "named_selection_names",
- "apdl_element_type", "section",
- "elprops", "keyopt_1" to "keyopt_18".""",
+ document=r"""Supported property names are: "mat", "named_selection", "named_selection_names", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18".""",
),
17: PinSpecification(
name="property_identifier",
type_names=["int32", "string"],
optional=True,
- document="""Retrieves a property at a given index or by
- name. for example, a named
- selection's number or a named
- selection's name.""",
+ document=r"""Retrieves a property at a given index or by name. For example, a named selection's number or a named selection's name.""",
),
},
map_output_pin_spec={
@@ -160,21 +141,14 @@ def _spec():
name="property",
type_names=["scoping", "property_field", "string_field"],
optional=False,
- document="""Returns a property field for properties:
- "mat", "apdl_element_type",
- "section", "elprops", "keyopt_1" to
- "keyopt_18" (or any mesh's property
- field), a scoping for
- properties:"named_selection", a
- string field for properties:
- "named_selection_names".""",
+ document=r"""Returns a property field for properties: "mat", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18" (or any mesh's property field), a scoping for properties:"named_selection", a string field for properties: "named_selection_names".""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -183,29 +157,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_property_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshPropertyProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshPropertyProvider
+ inputs:
+ An instance of InputsMeshPropertyProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshPropertyProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshPropertyProvider
+ outputs:
+ An instance of OutputsMeshPropertyProvider.
"""
return super().outputs
@@ -254,15 +235,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_identifier)
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Retrieves a property field on a subset of
- elements or nodes.
+ Retrieves a property field on a subset of elements or nodes.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -275,14 +256,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (result file container) (optional)
+ streams (result file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -295,15 +277,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -316,18 +298,15 @@ def data_sources(self):
return self._data_sources
@property
- def property_name(self):
- """Allows to connect property_name input to the operator.
+ def property_name(self) -> Input:
+ r"""Allows to connect property_name input to the operator.
- Supported property names are: "mat",
- "named_selection",
- "named_selection_names",
- "apdl_element_type", "section",
- "elprops", "keyopt_1" to "keyopt_18".
+ Supported property names are: "mat", "named_selection", "named_selection_names", "apdl_element_type", "section", "elprops", "keyopt_1" to "keyopt_18".
- Parameters
- ----------
- my_property_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,17 +319,15 @@ def property_name(self):
return self._property_name
@property
- def property_identifier(self):
- """Allows to connect property_identifier input to the operator.
+ def property_identifier(self) -> Input:
+ r"""Allows to connect property_identifier input to the operator.
- Retrieves a property at a given index or by
- name. for example, a named
- selection's number or a named
- selection's name.
+ Retrieves a property at a given index or by name. For example, a named selection's number or a named selection's name.
- Parameters
- ----------
- my_property_identifier : int or str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
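For mesh_property_provider, a sketch using one of the supported property names; the output type depends on the requested property, so it is read back through the matching typed accessor on op.outputs (the path and the property choice are placeholders):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.mesh_property_provider(data_sources=ds)
>>> op.inputs.property_name.connect("mat")
>>> # the result is then read from op.outputs as a PropertyField, Scoping,
>>> # or StringField, depending on the property requested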
diff --git a/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py
index da34d928b3e..0fb7ff71aed 100644
--- a/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/mesh_selection_manager_provider.py
@@ -4,29 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_selection_manager_provider(Operator):
- """Reads mesh properties from the results files contained in the streams
- or data sources, and makes those properties available through a
- mesh selection manager in output.
+ r"""Reads mesh properties from the results files contained in the streams or
+ data sources, and makes those properties available through a mesh
+ selection manager in output.
+
Parameters
----------
- streams_container : StreamsContainer, optional
- Streams (result file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ streams_container: StreamsContainer, optional
+ streams (result file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- mesh_selection_manager : MeshSelectionManager
+ mesh_selection_manager: MeshSelectionManager
Examples
--------
@@ -65,10 +69,11 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads mesh properties from the results files contained in the streams
- or data sources, and makes those properties available
- through a mesh selection manager in output."""
+ def _spec() -> Specification:
+ description = r"""Reads mesh properties from the results files contained in the streams or
+data sources, and makes those properties available through a mesh
+selection manager in output.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -76,14 +81,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional)""",
+ document=r"""streams (result file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -91,14 +95,14 @@ def _spec():
name="mesh_selection_manager",
type_names=["mesh_selection_manager"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -107,31 +111,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="MeshSelectionManagerProvider", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshSelectionManagerProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshSelectionManagerProvider
+ inputs:
+ An instance of InputsMeshSelectionManagerProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshSelectionManagerProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshSelectionManagerProvider
+ outputs:
+ An instance of OutputsMeshSelectionManagerProvider.
"""
return super().outputs
@@ -162,14 +173,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (result file container) (optional)
+ streams (result file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -182,15 +194,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,18 +235,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._mesh_selection_manager)
@property
- def mesh_selection_manager(self):
- """Allows to get mesh_selection_manager output of the operator
+ def mesh_selection_manager(self) -> Output:
+ r"""Allows to get mesh_selection_manager output of the operator
Returns
- ----------
- my_mesh_selection_manager : MeshSelectionManager
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.mesh_selection_manager_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mesh_selection_manager = op.outputs.mesh_selection_manager()
- """ # noqa: E501
+ """
return self._mesh_selection_manager
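A sketch for mesh_selection_manager_provider (same placeholder path):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.mesh_selection_manager_provider(data_sources=ds)
>>> selection_manager = op.outputs.mesh_selection_manager()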
diff --git a/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py b/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py
index 57f31304197..21e1397f167 100644
--- a/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/mesh_support_provider.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class mesh_support_provider(Operator):
- """Reads the mesh support.
+ r"""Reads the mesh support.
+
Parameters
----------
- streams_container : StreamsContainer, optional
+ streams_container: StreamsContainer, optional
Streams (result file container) (optional).
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- abstract_field_support : AbstractFieldSupport
+ abstract_field_support: AbstractFieldSupport
Examples
--------
@@ -61,8 +65,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the mesh support."""
+ def _spec() -> Specification:
+ description = r"""Reads the mesh support.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -70,14 +75,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional).""",
+ document=r"""Streams (result file container) (optional).""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -85,14 +89,14 @@ def _spec():
name="abstract_field_support",
type_names=["abstract_field_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -101,29 +105,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mesh_support_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMeshSupportProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMeshSupportProvider
+ inputs:
+ An instance of InputsMeshSupportProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMeshSupportProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMeshSupportProvider
+ outputs:
+ An instance of OutputsMeshSupportProvider.
"""
return super().outputs
@@ -154,14 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams (result file container) (optional).
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -174,15 +186,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +227,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._abstract_field_support)
@property
- def abstract_field_support(self):
- """Allows to get abstract_field_support output of the operator
+ def abstract_field_support(self) -> Output:
+ r"""Allows to get abstract_field_support output of the operator
Returns
- ----------
- my_abstract_field_support : AbstractFieldSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.mesh_support_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_abstract_field_support = op.outputs.abstract_field_support()
- """ # noqa: E501
+ """
return self._abstract_field_support
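A sketch for mesh_support_provider (same placeholder path):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.mesh_support_provider(data_sources=ds)
>>> mesh_support = op.outputs.abstract_field_support()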
diff --git a/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py b/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py
index 9ab1e7863e5..213a69e7fb1 100644
--- a/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py
+++ b/src/ansys/dpf/core/operators/metadata/property_field_provider_by_name.py
@@ -4,43 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class property_field_provider_by_name(Operator):
- """Provides the property values for a set of elements for a defined
+ r"""Provides the property values for a set of elements for a defined
property name.
+
Parameters
----------
- mesh_scoping : Scoping, optional
- Scoping that defines the set of elements to
- fetch the property values for. if not
- specified, applied on all the
- elements of the mesh.
- streams_container : StreamsContainer, optional
- Optional if using a datasources
- data_sources : DataSources
- Optional if using a streamscontainer
- property_name : str
- Property to read, that can be the following:
- elements_connectivity,
- nodes_connectivity, material,
- element_type, apdl_section_id,
- apdl_real_id, apdl_esys_id,
- mapdl_element_type,
- mapdl_element_type_id,
- harmonic_index, step, substep,
- keyopt_i (i = 1 -> 18).
+ mesh_scoping: Scoping, optional
+ scoping that defines the set of elements to fetch the property values for. If not specified, applied on all the elements of the mesh.
+ streams_container: StreamsContainer, optional
+ optional if using a dataSources
+ data_sources: DataSources
+ optional if using a streamsContainer
+ property_name: str
+ property to read, that can be the following: elements_connectivity, nodes_connectivity, material, element_type, apdl_section_id, apdl_real_id, apdl_esys_id, mapdl_element_type, mapdl_element_type_id, harmonic_index, step, substep, keyopt_i (i = 1 -> 18).
Returns
-------
- property_field : PropertyField
- Property field
+ property_field: PropertyField
+ property field
Examples
--------
@@ -95,9 +89,10 @@ def __init__(
self.inputs.property_name.connect(property_name)
@staticmethod
- def _spec():
- description = """Provides the property values for a set of elements for a defined
- property name."""
+ def _spec() -> Specification:
+ description = r"""Provides the property values for a set of elements for a defined
+property name.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -105,36 +100,25 @@ def _spec():
name="mesh_scoping",
type_names=["scoping"],
optional=True,
- document="""Scoping that defines the set of elements to
- fetch the property values for. if not
- specified, applied on all the
- elements of the mesh.""",
+ document=r"""scoping that defines the set of elements to fetch the property values for. If not specified, applied on all the elements of the mesh.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if using a datasources""",
+ document=r"""optional if using a dataSources""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if using a streamscontainer""",
+ document=r"""optional if using a streamsContainer""",
),
13: PinSpecification(
name="property_name",
type_names=["string"],
optional=False,
- document="""Property to read, that can be the following:
- elements_connectivity,
- nodes_connectivity, material,
- element_type, apdl_section_id,
- apdl_real_id, apdl_esys_id,
- mapdl_element_type,
- mapdl_element_type_id,
- harmonic_index, step, substep,
- keyopt_i (i = 1 -> 18).""",
+ document=r"""property to read, that can be the following: elements_connectivity, nodes_connectivity, material, element_type, apdl_section_id, apdl_real_id, apdl_esys_id, mapdl_element_type, mapdl_element_type_id, harmonic_index, step, substep, keyopt_i (i = 1 -> 18).""",
),
},
map_output_pin_spec={
@@ -142,14 +126,14 @@ def _spec():
name="property_field",
type_names=["property_field"],
optional=False,
- document="""Property field""",
+ document=r"""property field""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -158,31 +142,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="property_field_provider_by_name", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsPropertyFieldProviderByName:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPropertyFieldProviderByName
+ inputs:
+ An instance of InputsPropertyFieldProviderByName.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPropertyFieldProviderByName:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPropertyFieldProviderByName
+ outputs:
+ An instance of OutputsPropertyFieldProviderByName.
"""
return super().outputs
@@ -225,17 +216,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_name)
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Scoping that defines the set of elements to
- fetch the property values for. if not
- specified, applied on all the
- elements of the mesh.
+ scoping that defines the set of elements to fetch the property values for. If not specified, applied on all the elements of the mesh.
- Parameters
- ----------
- my_mesh_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -248,14 +237,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if using a datasources
+ optional if using a dataSources
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -268,14 +258,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if using a streamscontainer
+ optional if using a streamsContainer
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -288,22 +279,15 @@ def data_sources(self):
return self._data_sources
@property
- def property_name(self):
- """Allows to connect property_name input to the operator.
-
- Property to read, that can be the following:
- elements_connectivity,
- nodes_connectivity, material,
- element_type, apdl_section_id,
- apdl_real_id, apdl_esys_id,
- mapdl_element_type,
- mapdl_element_type_id,
- harmonic_index, step, substep,
- keyopt_i (i = 1 -> 18).
+ def property_name(self) -> Input:
+ r"""Allows to connect property_name input to the operator.
- Parameters
- ----------
- my_property_name : str
+ property to read, that can be the following: elements_connectivity, nodes_connectivity, material, element_type, apdl_section_id, apdl_real_id, apdl_esys_id, mapdl_element_type, mapdl_element_type_id, harmonic_index, step, substep, keyopt_i (i = 1 -> 18).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -336,18 +320,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._property_field)
@property
- def property_field(self):
- """Allows to get property_field output of the operator
+ def property_field(self) -> Output:
+ r"""Allows to get property_field output of the operator
+
+ property field
Returns
- ----------
- my_property_field : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.property_field_provider_by_name()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_property_field = op.outputs.property_field()
- """ # noqa: E501
+ """
return self._property_field
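A sketch for property_field_provider_by_name, requesting one of the documented property names (the path and the property choice are placeholders):
>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.property_field_provider_by_name(data_sources=ds)
>>> op.inputs.property_name.connect("material")
>>> prop_field = op.outputs.property_field()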
diff --git a/src/ansys/dpf/core/operators/metadata/real_constants_provider.py b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py
index 78efd6d52c4..bc4cadc93fa 100644
--- a/src/ansys/dpf/core/operators/metadata/real_constants_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/real_constants_provider.py
@@ -4,34 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class real_constants_provider(Operator):
- """Reads real constants from the result files contained in the streams or
+ r"""Reads real constants from the result files contained in the streams or
data sources.
+
Parameters
----------
- solver_real_constants_ids : int, optional
- Real constant ids to recover used by the
- solver. if not set, all available
- real constants to be recovered.
- streams : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data.
- data_sources : DataSources
- Result file path container, used if no
- streams are set.
+ solver_real_constants_ids: int, optional
+ Real Constant ids to recover used by the solver. If not set, all available real constants to be recovered.
+ streams: StreamsContainer, optional
+ Result file container allowed to be kept open to cache data.
+ data_sources: DataSources
+ Result file path container, used if no streams are set.
Returns
-------
- real_constants1 : Field
- real_constants2 : Field
+ real_constants1: Field
+ real_constants2: Field
Examples
--------
@@ -79,9 +80,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads real constants from the result files contained in the streams or
- data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads real constants from the result files contained in the streams or
+data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -89,23 +91,19 @@ def _spec():
name="solver_real_constants_ids",
type_names=["int32", "vector"],
optional=True,
- document="""Real constant ids to recover used by the
- solver. if not set, all available
- real constants to be recovered.""",
+ document=r"""Real Constant ids to recover used by the solver. If not set, all available real constants to be recovered.""",
),
3: PinSpecification(
name="streams",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data.""",
+ document=r"""Result file container allowed to be kept open to cache data.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set.""",
+ document=r"""Result file path container, used if no streams are set.""",
),
},
map_output_pin_spec={
@@ -113,20 +111,20 @@ def _spec():
name="real_constants1",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="real_constants2",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -135,29 +133,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="real_constants_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsRealConstantsProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsRealConstantsProvider
+ inputs:
+ An instance of InputsRealConstantsProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsRealConstantsProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsRealConstantsProvider
+ outputs:
+ An instance of OutputsRealConstantsProvider.
"""
return super().outputs
@@ -192,16 +197,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def solver_real_constants_ids(self):
- """Allows to connect solver_real_constants_ids input to the operator.
+ def solver_real_constants_ids(self) -> Input:
+ r"""Allows to connect solver_real_constants_ids input to the operator.
- Real constant ids to recover used by the
- solver. if not set, all available
- real constants to be recovered.
+ Real Constant ids to recover used by the solver. If not set, all available real constants to be recovered.
- Parameters
- ----------
- my_solver_real_constants_ids : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -214,15 +218,15 @@ def solver_real_constants_ids(self):
return self._solver_real_constants_ids
@property
- def streams(self):
- """Allows to connect streams input to the operator.
+ def streams(self) -> Input:
+ r"""Allows to connect streams input to the operator.
- Result file container allowed to be kept open
- to cache data.
+ Result file container allowed to be kept open to cache data.
- Parameters
- ----------
- my_streams : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -235,15 +239,15 @@ def streams(self):
return self._streams
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set.
+ Result file path container, used if no streams are set.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -281,35 +285,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._real_constants2)
@property
- def real_constants1(self):
- """Allows to get real_constants1 output of the operator
+ def real_constants1(self) -> Output:
+ r"""Allows to get real_constants1 output of the operator
Returns
- ----------
- my_real_constants1 : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.real_constants_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_real_constants1 = op.outputs.real_constants1()
- """ # noqa: E501
+ """
return self._real_constants1
@property
- def real_constants2(self):
- """Allows to get real_constants2 output of the operator
+ def real_constants2(self) -> Output:
+ r"""Allows to get real_constants2 output of the operator
Returns
- ----------
- my_real_constants2 : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.real_constants_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_real_constants2 = op.outputs.real_constants2()
- """ # noqa: E501
+ """
return self._real_constants2
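A minimal usage sketch for the provider above, relying only on the pins documented in this diff; the result file path is hypothetical:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("model.rst")  # hypothetical result file
>>> op = dpf.operators.metadata.real_constants_provider()
>>> op.inputs.data_sources.connect(data_sources)
>>> real_constants = op.outputs.real_constants1()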
diff --git a/src/ansys/dpf/core/operators/metadata/result_info_provider.py b/src/ansys/dpf/core/operators/metadata/result_info_provider.py
index 61e16c12689..0bce27af9d1 100644
--- a/src/ansys/dpf/core/operators/metadata/result_info_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/result_info_provider.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class result_info_provider(Operator):
- """Reads the result information, such as available results or unit
- systems from the results files contained in the streams or data
- sources.
+ r"""Reads the result information, such as available results or unit systems
+ from the results files contained in the streams or data sources.
+
Parameters
----------
- streams_container : StreamsContainer, optional
- Streams (result file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ streams_container: StreamsContainer, optional
+ streams (result file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- result_info : ResultInfo
+ result_info: ResultInfo
Examples
--------
@@ -63,10 +66,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the result information, such as available results or unit
- systems from the results files contained in the streams or
- data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads the result information, such as available results or unit systems
+from the results files contained in the streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -74,14 +77,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional)""",
+ document=r"""streams (result file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -89,14 +91,14 @@ def _spec():
name="result_info",
type_names=["result_info"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -105,29 +107,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="result_info_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsResultInfoProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsResultInfoProvider
+ inputs:
+ An instance of InputsResultInfoProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsResultInfoProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsResultInfoProvider
+ outputs:
+ An instance of OutputsResultInfoProvider.
"""
return super().outputs
@@ -156,14 +165,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (result file container) (optional)
+ streams (result file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,15 +186,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -215,18 +225,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._result_info)
@property
- def result_info(self):
- """Allows to get result_info output of the operator
+ def result_info(self) -> Output:
+ r"""Allows to get result_info output of the operator
Returns
- ----------
- my_result_info : ResultInfo
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.result_info_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_result_info = op.outputs.result_info()
- """ # noqa: E501
+ """
return self._result_info
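For illustration, a sketch of reading the result information through the pins shown above; the file path is hypothetical and ``available_results`` is the usual ResultInfo property, not part of this change:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.result_info_provider()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> result_info = op.outputs.result_info()
>>> print(result_info.available_results)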
diff --git a/src/ansys/dpf/core/operators/metadata/streams_provider.py b/src/ansys/dpf/core/operators/metadata/streams_provider.py
index 04db0517043..4abcd298828 100644
--- a/src/ansys/dpf/core/operators/metadata/streams_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/streams_provider.py
@@ -4,23 +4,28 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class streams_provider(Operator):
- """Creates streams (files with cache) from the data sources.
+ r"""Creates streams (files with cache) from the data sources.
+
Parameters
----------
- data_sources : DataSources
+ data_sources: DataSources
Returns
-------
- streams_container : StreamsContainer
+ streams_container: StreamsContainer
Examples
--------
@@ -50,8 +55,9 @@ def __init__(self, data_sources=None, config=None, server=None):
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Creates streams (files with cache) from the data sources."""
+ def _spec() -> Specification:
+ description = r"""Creates streams (files with cache) from the data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -59,7 +65,7 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -67,14 +73,14 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -83,29 +89,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="stream_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsStreamsProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsStreamsProvider
+ inputs:
+ An instance of InputsStreamsProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsStreamsProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsStreamsProvider
+ outputs:
+ An instance of OutputsStreamsProvider.
"""
return super().outputs
@@ -128,12 +141,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -164,18 +178,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._streams_container)
@property
- def streams_container(self):
- """Allows to get streams_container output of the operator
+ def streams_container(self) -> Output:
+ r"""Allows to get streams_container output of the operator
Returns
- ----------
- my_streams_container : StreamsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.streams_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_streams_container = op.outputs.streams_container()
- """ # noqa: E501
+ """
return self._streams_container
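A short sketch chaining the streams provider into another metadata provider, using only the pins documented in this diff (the file path is hypothetical):

>>> from ansys.dpf import core as dpf
>>> streams_op = dpf.operators.metadata.streams_provider(
...     data_sources=dpf.DataSources("model.rst")  # hypothetical path
... )
>>> streams = streams_op.outputs.streams_container()
>>> info_op = dpf.operators.metadata.result_info_provider()
>>> info_op.inputs.streams_container.connect(streams)  # reuse the open streams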
diff --git a/src/ansys/dpf/core/operators/metadata/time_freq_provider.py b/src/ansys/dpf/core/operators/metadata/time_freq_provider.py
index 9f73dfe5030..3bfb939845e 100644
--- a/src/ansys/dpf/core/operators/metadata/time_freq_provider.py
+++ b/src/ansys/dpf/core/operators/metadata/time_freq_provider.py
@@ -4,28 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_freq_provider(Operator):
- """Reads the time/frequency support from the results files contained in
- the streams or data sources.
+ r"""Reads the time/frequency support from the results files contained in the
+ streams or data sources.
+
Parameters
----------
- streams_container : StreamsContainer, optional
- Streams (result file container) (optional)
- data_sources : DataSources
- If the stream is null, retrieves the file
- path from the data sources.
+ streams_container: StreamsContainer, optional
+ streams (result file container) (optional)
+ data_sources: DataSources
+ If the stream is null, retrieves the file path from the data sources.
Returns
-------
- time_freq_support : TimeFreqSupport
+ time_freq_support: TimeFreqSupport
Examples
--------
@@ -64,9 +68,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Reads the time/frequency support from the results files contained in
- the streams or data sources."""
+ def _spec() -> Specification:
+ description = r"""Reads the time/frequency support from the results files contained in the
+streams or data sources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -74,14 +79,13 @@ def _spec():
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Streams (result file container) (optional)""",
+ document=r"""streams (result file container) (optional)""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""If the stream is null, retrieves the file
- path from the data sources.""",
+ document=r"""If the stream is null, retrieves the file path from the data sources.""",
),
},
map_output_pin_spec={
@@ -89,14 +93,14 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -105,29 +109,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="time_freq_support_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeFreqProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeFreqProvider
+ inputs:
+ An instance of InputsTimeFreqProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeFreqProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeFreqProvider
+ outputs:
+ An instance of OutputsTimeFreqProvider.
"""
return super().outputs
@@ -156,14 +167,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Streams (result file container) (optional)
+ streams (result file container) (optional)
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -176,15 +188,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- If the stream is null, retrieves the file
- path from the data sources.
+ If the stream is null, retrieves the file path from the data sources.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -217,18 +229,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._time_freq_support)
@property
- def time_freq_support(self):
- """Allows to get time_freq_support output of the operator
+ def time_freq_support(self) -> Output:
+ r"""Allows to get time_freq_support output of the operator
Returns
- ----------
- my_time_freq_support : TimeFreqSupport
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.time_freq_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_support = op.outputs.time_freq_support()
- """ # noqa: E501
+ """
return self._time_freq_support
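As a sketch, the time/frequency support can be read as below; the path is hypothetical and ``time_frequencies`` is the standard TimeFreqSupport property, not introduced here:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.metadata.time_freq_provider()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> tf_support = op.outputs.time_freq_support()
>>> print(tf_support.time_frequencies)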
diff --git a/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py b/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py
index d2ad159d09c..27b7f057b5f 100644
--- a/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py
+++ b/src/ansys/dpf/core/operators/metadata/time_freq_support_get_attribute.py
@@ -4,44 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.outputs import _modify_output_spec_with_one_type
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_freq_support_get_attribute(Operator):
- """Uses the TimeFreqSupport APIs to return a given attribute of the
- scoping in input.
+ r"""Uses the TimeFreqSupport APIs to return a given attribute of the scoping
+ in input.
+
Parameters
----------
- time_freq_support : TimeFreqSupport
- property_name : str
- Supported property names are: "time_freqs",
- "imaginary_freqs",
- "frequency_tolerance", "set_id",
- "cummulative_index", "sets_freqs".
- property_identifier : int, optional
- Additional pin for properties "set_id" and
- "cummulative_index": the step id, for
- "sets_freqs": the sets scoping.
- property_identifier_2 : int, optional
- Additional pin for properties "set_id" and
- "cummulative_index": the substep id
- (if none, last substep is
- considered).
+ time_freq_support: TimeFreqSupport
+ property_name: str
+ Supported property names are: "time_freqs", "imaginary_freqs", "frequency_tolerance", "set_id", "cummulative_index", "sets_freqs".
+ property_identifier: int, optional
+ Additional pin for properties "set_id" and "cummulative_index": the step id, for "sets_freqs": the sets scoping.
+ property_identifier_2: int, optional
+ Additional pin for properties "set_id" and "cummulative_index": the substep id (if none, last substep is considered).
Returns
-------
- property : float or Field or Scoping
- Returns a double for property:
- "frequency_tolerance", a single-value
- scoping for properties for "set_id"
- and "cummulative_index", and a field
- otherwise.
+ property: float or Field or Scoping
+        Returns a double for property: "frequency_tolerance", a single-value Scoping for properties "set_id" and "cummulative_index", and a Field otherwise.
Examples
--------
@@ -96,9 +89,10 @@ def __init__(
self.inputs.property_identifier_2.connect(property_identifier_2)
@staticmethod
- def _spec():
- description = """Uses the TimeFreqSupport APIs to return a given attribute of the
- scoping in input."""
+ def _spec() -> Specification:
+ description = r"""Uses the TimeFreqSupport APIs to return a given attribute of the scoping
+in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -106,33 +100,25 @@ def _spec():
name="time_freq_support",
type_names=["time_freq_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="property_name",
type_names=["string"],
optional=False,
- document="""Supported property names are: "time_freqs",
- "imaginary_freqs",
- "frequency_tolerance", "set_id",
- "cummulative_index", "sets_freqs".""",
+ document=r"""Supported property names are: "time_freqs", "imaginary_freqs", "frequency_tolerance", "set_id", "cummulative_index", "sets_freqs".""",
),
2: PinSpecification(
name="property_identifier",
type_names=["int32"],
optional=True,
- document="""Additional pin for properties "set_id" and
- "cummulative_index": the step id, for
- "sets_freqs": the sets scoping.""",
+ document=r"""Additional pin for properties "set_id" and "cummulative_index": the step id, for "sets_freqs": the sets scoping.""",
),
3: PinSpecification(
name="property_identifier_2",
type_names=["int32"],
optional=True,
- document="""Additional pin for properties "set_id" and
- "cummulative_index": the substep id
- (if none, last substep is
- considered).""",
+ document=r"""Additional pin for properties "set_id" and "cummulative_index": the substep id (if none, last substep is considered).""",
),
},
map_output_pin_spec={
@@ -140,18 +126,14 @@ def _spec():
name="property",
type_names=["double", "field", "scoping"],
optional=False,
- document="""Returns a double for property:
- "frequency_tolerance", a single-value
- scoping for properties for "set_id"
- and "cummulative_index", and a field
- otherwise.""",
+                    document=r"""Returns a double for property: "frequency_tolerance", a single-value Scoping for properties "set_id" and "cummulative_index", and a Field otherwise.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -160,31 +142,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="timefreqsupport::get_attribute", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeFreqSupportGetAttribute:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeFreqSupportGetAttribute
+ inputs:
+ An instance of InputsTimeFreqSupportGetAttribute.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeFreqSupportGetAttribute:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeFreqSupportGetAttribute
+ outputs:
+ An instance of OutputsTimeFreqSupportGetAttribute.
"""
return super().outputs
@@ -227,12 +216,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._property_identifier_2)
@property
- def time_freq_support(self):
- """Allows to connect time_freq_support input to the operator.
+ def time_freq_support(self) -> Input:
+ r"""Allows to connect time_freq_support input to the operator.
- Parameters
- ----------
- my_time_freq_support : TimeFreqSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -245,17 +235,15 @@ def time_freq_support(self):
return self._time_freq_support
@property
- def property_name(self):
- """Allows to connect property_name input to the operator.
+ def property_name(self) -> Input:
+ r"""Allows to connect property_name input to the operator.
- Supported property names are: "time_freqs",
- "imaginary_freqs",
- "frequency_tolerance", "set_id",
- "cummulative_index", "sets_freqs".
+ Supported property names are: "time_freqs", "imaginary_freqs", "frequency_tolerance", "set_id", "cummulative_index", "sets_freqs".
- Parameters
- ----------
- my_property_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -268,16 +256,15 @@ def property_name(self):
return self._property_name
@property
- def property_identifier(self):
- """Allows to connect property_identifier input to the operator.
+ def property_identifier(self) -> Input:
+ r"""Allows to connect property_identifier input to the operator.
- Additional pin for properties "set_id" and
- "cummulative_index": the step id, for
- "sets_freqs": the sets scoping.
+ Additional pin for properties "set_id" and "cummulative_index": the step id, for "sets_freqs": the sets scoping.
- Parameters
- ----------
- my_property_identifier : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -290,17 +277,15 @@ def property_identifier(self):
return self._property_identifier
@property
- def property_identifier_2(self):
- """Allows to connect property_identifier_2 input to the operator.
+ def property_identifier_2(self) -> Input:
+ r"""Allows to connect property_identifier_2 input to the operator.
- Additional pin for properties "set_id" and
- "cummulative_index": the substep id
- (if none, last substep is
- considered).
+ Additional pin for properties "set_id" and "cummulative_index": the substep id (if none, last substep is considered).
- Parameters
- ----------
- my_property_identifier_2 : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
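A hedged sketch of querying an attribute with this operator: the pins are the ones documented above, the file path is hypothetical, and the typed output accessor name (``property_as_field``) is an assumption based on how multi-type output pins are generated elsewhere:

>>> from ansys.dpf import core as dpf
>>> tf_op = dpf.operators.metadata.time_freq_provider()
>>> tf_op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> op = dpf.operators.metadata.time_freq_support_get_attribute()
>>> op.inputs.time_freq_support.connect(tf_op.outputs.time_freq_support)
>>> op.inputs.property_name.connect("time_freqs")
>>> freqs = op.outputs.property_as_field()  # assumed accessor for the field-typed output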
diff --git a/src/ansys/dpf/core/operators/min_max/max_by_component.py b/src/ansys/dpf/core/operators/min_max/max_by_component.py
index c474a8ad768..ef25586eb6f 100644
--- a/src/ansys/dpf/core/operators/min_max/max_by_component.py
+++ b/src/ansys/dpf/core/operators/min_max/max_by_component.py
@@ -4,30 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class max_by_component(Operator):
- """Give the maximum for each element rank by comparing several fields.
+ r"""Give the maximum for each element rank by comparing several fields.
+
Parameters
----------
- use_absolute_value : bool
- Use_absolute_value
- field1 : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- field2 : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ use_absolute_value: bool
+ use_absolute_value
+ field1: Field or FieldsContainer
+ field or fields container with only one field is expected
+ field2: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -74,10 +77,9 @@ def __init__(
self.inputs.field2.connect(field2)
@staticmethod
- def _spec():
- description = (
- """Give the maximum for each element rank by comparing several fields."""
- )
+ def _spec() -> Specification:
+ description = r"""Give the maximum for each element rank by comparing several fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,21 +87,19 @@ def _spec():
name="use_absolute_value",
type_names=["bool"],
optional=False,
- document="""Use_absolute_value""",
+ document=r"""use_absolute_value""",
),
1: PinSpecification(
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
2: PinSpecification(
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -107,14 +107,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -123,29 +123,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="max_by_component", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMaxByComponent:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMaxByComponent
+ inputs:
+ An instance of InputsMaxByComponent.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMaxByComponent:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMaxByComponent
+ outputs:
+ An instance of OutputsMaxByComponent.
"""
return super().outputs
@@ -178,14 +185,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field2)
@property
- def use_absolute_value(self):
- """Allows to connect use_absolute_value input to the operator.
+ def use_absolute_value(self) -> Input:
+ r"""Allows to connect use_absolute_value input to the operator.
- Use_absolute_value
+ use_absolute_value
- Parameters
- ----------
- my_use_absolute_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,15 +206,15 @@ def use_absolute_value(self):
return self._use_absolute_value
@property
- def field1(self):
- """Allows to connect field1 input to the operator.
+ def field1(self) -> Input:
+ r"""Allows to connect field1 input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field1 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -219,15 +227,15 @@ def field1(self):
return self._field1
@property
- def field2(self):
- """Allows to connect field2 input to the operator.
+ def field2(self) -> Input:
+ r"""Allows to connect field2 input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field2 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +266,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_by_component()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
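A sketch with the constructor arguments documented above; ``field_a`` and ``field_b`` stand for two Fields defined on the same support and are hypothetical:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_by_component(
...     use_absolute_value=True, field1=field_a, field2=field_b
... )
>>> component_wise_max = op.outputs.field()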
diff --git a/src/ansys/dpf/core/operators/min_max/max_over_phase.py b/src/ansys/dpf/core/operators/min_max/max_over_phase.py
index d092cc3129e..927f1a00f2e 100644
--- a/src/ansys/dpf/core/operators/min_max/max_over_phase.py
+++ b/src/ansys/dpf/core/operators/min_max/max_over_phase.py
@@ -4,30 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class max_over_phase(Operator):
- """Returns, for each entity, the maximum value of (real value *
- cos(theta) - imaginary value * sin(theta)) for theta in [0,
- 360]degrees with the increment in input.
+ r"""Returns, for each entity, the maximum value of (real value \* cos(theta)
+    - imaginary value \* sin(theta)) for theta in [0, 360] degrees with the
+ increment in input.
+
Parameters
----------
- real_field : Field
- imaginary_field : Field
- abs_value : bool, optional
+ real_field: Field
+ imaginary_field: Field
+ abs_value: bool, optional
Should use absolute value.
- phase_increment : float, optional
+ phase_increment: float, optional
Phase increment (default is 10.0 degrees).
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -80,10 +85,11 @@ def __init__(
self.inputs.phase_increment.connect(phase_increment)
@staticmethod
- def _spec():
- description = """Returns, for each entity, the maximum value of (real value *
- cos(theta) - imaginary value * sin(theta)) for theta in
- [0, 360]degrees with the increment in input."""
+ def _spec() -> Specification:
+ description = r"""Returns, for each entity, the maximum value of (real value \* cos(theta)
+- imaginary value \* sin(theta)) for theta in [0, 360] degrees with the
+increment in input.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -91,25 +97,25 @@ def _spec():
name="real_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="imaginary_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
3: PinSpecification(
name="phase_increment",
type_names=["double"],
optional=True,
- document="""Phase increment (default is 10.0 degrees).""",
+ document=r"""Phase increment (default is 10.0 degrees).""",
),
},
map_output_pin_spec={
@@ -117,14 +123,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -133,29 +139,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="max_over_phase", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMaxOverPhase:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMaxOverPhase
+ inputs:
+ An instance of InputsMaxOverPhase.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMaxOverPhase:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMaxOverPhase
+ outputs:
+ An instance of OutputsMaxOverPhase.
"""
return super().outputs
@@ -190,12 +203,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phase_increment)
@property
- def real_field(self):
- """Allows to connect real_field input to the operator.
+ def real_field(self) -> Input:
+ r"""Allows to connect real_field input to the operator.
- Parameters
- ----------
- my_real_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -208,12 +222,13 @@ def real_field(self):
return self._real_field
@property
- def imaginary_field(self):
- """Allows to connect imaginary_field input to the operator.
+ def imaginary_field(self) -> Input:
+ r"""Allows to connect imaginary_field input to the operator.
- Parameters
- ----------
- my_imaginary_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -226,14 +241,15 @@ def imaginary_field(self):
return self._imaginary_field
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -246,14 +262,15 @@ def abs_value(self):
return self._abs_value
@property
- def phase_increment(self):
- """Allows to connect phase_increment input to the operator.
+ def phase_increment(self) -> Input:
+ r"""Allows to connect phase_increment input to the operator.
Phase increment (default is 10.0 degrees).
- Parameters
- ----------
- my_phase_increment : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -284,18 +301,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_over_phase()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
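For illustration, a sketch of a phase sweep; ``real_part`` and ``imaginary_part`` are hypothetical Fields from a harmonic result:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_over_phase(
...     real_field=real_part,
...     imaginary_field=imaginary_part,
...     abs_value=False,
...     phase_increment=5.0,  # finer sweep than the 10.0 degree default
... )
>>> max_field = op.outputs.field()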
diff --git a/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py
index a46dd35bc9b..298cdb966ad 100644
--- a/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/max_over_time_by_entity.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class max_over_time_by_entity(Operator):
- """Evaluates maximum over time/frequency.
+ r"""Evaluates maximum over time/frequency.
+
Parameters
----------
- fields_container : FieldsContainer
- abs_value : bool, optional
+ fields_container: FieldsContainer
+ abs_value: bool, optional
Should use absolute value.
- compute_amplitude : bool, optional
+ compute_amplitude: bool, optional
Do calculate amplitude.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,8 +76,9 @@ def __init__(
self.inputs.compute_amplitude.connect(compute_amplitude)
@staticmethod
- def _spec():
- description = """Evaluates maximum over time/frequency."""
+ def _spec() -> Specification:
+ description = r"""Evaluates maximum over time/frequency.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
4: PinSpecification(
name="compute_amplitude",
type_names=["bool"],
optional=True,
- document="""Do calculate amplitude.""",
+ document=r"""Do calculate amplitude.""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="max_over_time_by_entity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMaxOverTimeByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMaxOverTimeByEntity
+ inputs:
+ An instance of InputsMaxOverTimeByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMaxOverTimeByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMaxOverTimeByEntity
+ outputs:
+ An instance of OutputsMaxOverTimeByEntity.
"""
return super().outputs
@@ -173,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_amplitude)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -191,14 +205,15 @@ def fields_container(self):
return self._fields_container
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +226,15 @@ def abs_value(self):
return self._abs_value
@property
- def compute_amplitude(self):
- """Allows to connect compute_amplitude input to the operator.
+ def compute_amplitude(self) -> Input:
+ r"""Allows to connect compute_amplitude input to the operator.
Do calculate amplitude.
- Parameters
- ----------
- my_compute_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
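A sketch using the pins documented above; ``fc`` stands for a hypothetical FieldsContainer with one field per time step:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.max_over_time_by_entity(
...     fields_container=fc, abs_value=True
... )
>>> max_per_entity = op.outputs.fields_container()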
diff --git a/src/ansys/dpf/core/operators/min_max/min_by_component.py b/src/ansys/dpf/core/operators/min_max/min_by_component.py
index d053b2c74de..e566117bbce 100644
--- a/src/ansys/dpf/core/operators/min_max/min_by_component.py
+++ b/src/ansys/dpf/core/operators/min_max/min_by_component.py
@@ -4,30 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_by_component(Operator):
- """Give the minimum for each element rank by comparing several fields.
+ r"""Give the minimum for each element rank by comparing several fields.
+
Parameters
----------
- use_absolute_value : bool
- Use_absolute_value
- field1 : Field or FieldsContainer
- Field or fields container with only one field
- is expected
- field2 : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ use_absolute_value: bool
+ use_absolute_value
+ field1: Field or FieldsContainer
+ field or fields container with only one field is expected
+ field2: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -74,10 +77,9 @@ def __init__(
self.inputs.field2.connect(field2)
@staticmethod
- def _spec():
- description = (
- """Give the minimum for each element rank by comparing several fields."""
- )
+ def _spec() -> Specification:
+ description = r"""Give the minimum for each element rank by comparing several fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,21 +87,19 @@ def _spec():
name="use_absolute_value",
type_names=["bool"],
optional=False,
- document="""Use_absolute_value""",
+ document=r"""use_absolute_value""",
),
1: PinSpecification(
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
2: PinSpecification(
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -107,14 +107,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -123,29 +123,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_by_component", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinByComponent:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinByComponent
+ inputs:
+ An instance of InputsMinByComponent.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinByComponent:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinByComponent
+ outputs:
+ An instance of OutputsMinByComponent.
"""
return super().outputs
@@ -178,14 +185,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field2)
@property
- def use_absolute_value(self):
- """Allows to connect use_absolute_value input to the operator.
+ def use_absolute_value(self) -> Input:
+ r"""Allows to connect use_absolute_value input to the operator.
- Use_absolute_value
+ use_absolute_value
- Parameters
- ----------
- my_use_absolute_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -198,15 +206,15 @@ def use_absolute_value(self):
return self._use_absolute_value
@property
- def field1(self):
- """Allows to connect field1 input to the operator.
+ def field1(self) -> Input:
+ r"""Allows to connect field1 input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field1 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -219,15 +227,15 @@ def field1(self):
return self._field1
@property
- def field2(self):
- """Allows to connect field2 input to the operator.
+ def field2(self) -> Input:
+ r"""Allows to connect field2 input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field2 : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,18 +266,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_by_component()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
diff --git a/src/ansys/dpf/core/operators/min_max/min_max.py b/src/ansys/dpf/core/operators/min_max/min_max.py
index dceff8ef37d..0a6c39480b3 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max.py
@@ -4,27 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max(Operator):
- """Compute the component-wise minimum (out 0) and maximum (out 1) over a
+ r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
field.
+
Parameters
----------
- field : Field or FieldsContainer
- Field or fields container with only one field
- is expected
+ field: Field or FieldsContainer
+ field or fields container with only one field is expected
Returns
-------
- field_min : Field
- field_max : Field
+ field_min: Field
+ field_max: Field
Examples
--------
@@ -55,9 +59,10 @@ def __init__(self, field=None, config=None, server=None):
self.inputs.field.connect(field)
@staticmethod
- def _spec():
- description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a
- field."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
+field.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -65,8 +70,7 @@ def _spec():
name="field",
type_names=["field", "fields_container"],
optional=False,
- document="""Field or fields container with only one field
- is expected""",
+ document=r"""field or fields container with only one field is expected""",
),
},
map_output_pin_spec={
@@ -74,20 +78,20 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -96,29 +100,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMax:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMax
+ inputs:
+ An instance of InputsMinMax.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMax:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMax
+ outputs:
+ An instance of OutputsMinMax.
"""
return super().outputs
@@ -141,15 +152,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Field or fields container with only one field
- is expected
+ field or fields container with only one field is expected
- Parameters
- ----------
- my_field : Field or FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -183,35 +194,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
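A sketch of the two-output pattern annotated above; the result file is hypothetical, and extracting the first field from an evaluated displacement result is standard PyDPF usage rather than part of this change:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model("model.rst")  # hypothetical result file
>>> field = model.results.displacement().eval()[0]
>>> op = dpf.operators.min_max.min_max(field=field)
>>> lower_bound = op.outputs.field_min()
>>> upper_bound = op.outputs.field_max()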
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py
index 14f7693d455..2877886b0c8 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_by_entity.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_by_entity(Operator):
- """Compute the entity-wise minimum (out 0) and maximum (out 1) through
- all fields of a fields container.
+ r"""Compute the entity-wise minimum (out 0) and maximum (out 1) through all
+ fields of a fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- field_min : Field
- field_max : Field
+ field_min: Field
+ field_max: Field
Examples
--------
@@ -53,9 +58,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Compute the entity-wise minimum (out 0) and maximum (out 1) through
- all fields of a fields container."""
+ def _spec() -> Specification:
+ description = r"""Compute the entity-wise minimum (out 0) and maximum (out 1) through all
+fields of a fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -71,20 +77,20 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -93,29 +99,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_by_entity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxByEntity
+ inputs:
+ An instance of InputsMinMaxByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxByEntity
+ outputs:
+ An instance of OutputsMinMaxByEntity.
"""
return super().outputs
@@ -140,12 +153,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -179,35 +193,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
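
A minimal usage sketch for min_max_by_entity with the typed inputs and outputs above; the empty FieldsContainer is only a placeholder, and a populated container (for example from a results provider) is needed for a meaningful evaluation:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_entity()
>>> my_fields_container = dpf.FieldsContainer()  # placeholder; connect real result data
>>> op.inputs.fields_container.connect(my_fields_container)
>>> # Requesting an output evaluates the operator
>>> entity_min = op.outputs.field_min()
>>> entity_max = op.outputs.field_max()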
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_by_time.py b/src/ansys/dpf/core/operators/min_max/min_max_by_time.py
index a84d0793a2b..a2479c4c404 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_by_time.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_by_time.py
@@ -4,29 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_by_time(Operator):
- """Evaluates minimum, maximum by time or frequency over all the entities
- of each field
+ r"""Evaluates minimum, maximum by time or frequency over all the entities of
+ each field
+
Parameters
----------
- fields_container : FieldsContainer
- compute_absolute_value : bool, optional
- Calculate the absolute value of field
- entities before computing the
- min/max.
+ fields_container: FieldsContainer
+ compute_absolute_value: bool, optional
+ Calculate the absolute value of field entities before computing the min/max.
Returns
-------
- min : FieldsContainer
- max : FieldsContainer
+ min: FieldsContainer
+ max: FieldsContainer
Examples
--------
@@ -68,9 +71,10 @@ def __init__(
self.inputs.compute_absolute_value.connect(compute_absolute_value)
@staticmethod
- def _spec():
- description = """Evaluates minimum, maximum by time or frequency over all the entities
- of each field"""
+ def _spec() -> Specification:
+ description = r"""Evaluates minimum, maximum by time or frequency over all the entities of
+each field
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -78,15 +82,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="compute_absolute_value",
type_names=["bool"],
optional=True,
- document="""Calculate the absolute value of field
- entities before computing the
- min/max.""",
+ document=r"""Calculate the absolute value of field entities before computing the min/max.""",
),
},
map_output_pin_spec={
@@ -94,20 +96,20 @@ def _spec():
name="min",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="max",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +118,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_by_time", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxByTime:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxByTime
+ inputs:
+ An instance of InputsMinMaxByTime.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxByTime:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxByTime
+ outputs:
+ An instance of OutputsMinMaxByTime.
"""
return super().outputs
@@ -167,12 +176,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_absolute_value)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -185,16 +195,15 @@ def fields_container(self):
return self._fields_container
@property
- def compute_absolute_value(self):
- """Allows to connect compute_absolute_value input to the operator.
+ def compute_absolute_value(self) -> Input:
+ r"""Allows to connect compute_absolute_value input to the operator.
- Calculate the absolute value of field
- entities before computing the
- min/max.
+ Calculate the absolute value of field entities before computing the min/max.
- Parameters
- ----------
- my_compute_absolute_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -228,35 +237,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._max)
@property
- def min(self):
- """Allows to get min output of the operator
+ def min(self) -> Output:
+ r"""Allows to get min output of the operator
Returns
- ----------
- my_min : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_min = op.outputs.min()
- """ # noqa: E501
+ """
return self._min
@property
- def max(self):
- """Allows to get max output of the operator
+ def max(self) -> Output:
+ r"""Allows to get max output of the operator
Returns
- ----------
- my_max : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_time()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_max = op.outputs.max()
- """ # noqa: E501
+ """
return self._max
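
A sketch of min_max_by_time showing the optional compute_absolute_value pin; the fields container below is an assumed placeholder standing in for a transient result with one field per time step:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_by_time()
>>> my_transient_fc = dpf.FieldsContainer()  # placeholder for a transient result
>>> op.inputs.fields_container.connect(my_transient_fc)
>>> op.inputs.compute_absolute_value.connect(True)  # optional, pin 2
>>> fc_min = op.outputs.min()
>>> fc_max = op.outputs.max()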
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_fc.py b/src/ansys/dpf/core/operators/min_max/min_max_fc.py
index d744593cf39..7aac8dd0ad9 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_fc.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_fc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_fc(Operator):
- """Compute the component-wise minimum (out 0) and maximum (out 1) over a
+ r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- field_min : Field
- field_max : Field
+ field_min: Field
+ field_max: Field
Examples
--------
@@ -53,9 +58,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
+fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -71,20 +77,20 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -93,29 +99,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxFc
+ inputs:
+ An instance of InputsMinMaxFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxFc
+ outputs:
+ An instance of OutputsMinMaxFc.
"""
return super().outputs
@@ -138,12 +151,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -177,35 +191,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
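
For min_max_fc, the constructor also accepts the input directly, which is equivalent to connecting the pin explicitly; a sketch with a placeholder container:
>>> from ansys.dpf import core as dpf
>>> my_fields_container = dpf.FieldsContainer()  # placeholder; connect real result data
>>> # Instantiate and connect the input in one call
>>> op = dpf.operators.min_max.min_max_fc(fields_container=my_fields_container)
>>> component_min = op.outputs.field_min()
>>> component_max = op.outputs.field_max()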
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py b/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py
index b627b32ba71..0c62e5afdf1 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_fc_inc.py
@@ -4,25 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_fc_inc(Operator):
- """Compute the component-wise minimum (out 0) and maximum (out 1) over a
+ r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
fields container.
+
Parameters
----------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Returns
-------
- field_min : Field
- field_max : Field
+ field_min: Field
+ field_max: Field
Examples
--------
@@ -53,9 +58,10 @@ def __init__(self, fields_container=None, config=None, server=None):
self.inputs.fields_container.connect(fields_container)
@staticmethod
- def _spec():
- description = """Compute the component-wise minimum (out 0) and maximum (out 1) over a
- fields container."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise minimum (out 0) and maximum (out 1) over a
+fields container.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -63,7 +69,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -71,20 +77,20 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -93,29 +99,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_fc_inc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxFcInc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxFcInc
+ inputs:
+ An instance of InputsMinMaxFcInc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxFcInc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxFcInc
+ outputs:
+ An instance of OutputsMinMaxFcInc.
"""
return super().outputs
@@ -138,12 +151,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -177,35 +191,37 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_fc_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_fc_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
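
min_max_fc_inc exposes the same pins as min_max_fc (the _inc suffix presumably denotes an incremental evaluation), so the same call pattern applies:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_fc_inc(fields_container=dpf.FieldsContainer())
>>> field_min = op.outputs.field_min()
>>> field_max = op.outputs.field_max()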
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_inc.py b/src/ansys/dpf/core/operators/min_max/min_max_inc.py
index 1a89065ceb7..7dda1ed2a68 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_inc.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_inc.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_inc(Operator):
- """Compute the component-wise minimum (out 0) and maximum (out 1) over
+ r"""Compute the component-wise minimum (out 0) and maximum (out 1) over
coming fields.
+
Parameters
----------
- field : Field
- domain_id : int, optional
+ field: Field
+ domain_id: int, optional
Returns
-------
- field_min : Field
- field_max : Field
- domain_ids_min : Scoping
- domain_ids_max : Scoping
+ field_min: Field
+ field_max: Field
+ domain_ids_min: Scoping
+ domain_ids_max: Scoping
Examples
--------
@@ -63,9 +68,10 @@ def __init__(self, field=None, domain_id=None, config=None, server=None):
self.inputs.domain_id.connect(domain_id)
@staticmethod
- def _spec():
- description = """Compute the component-wise minimum (out 0) and maximum (out 1) over
- coming fields."""
+ def _spec() -> Specification:
+ description = r"""Compute the component-wise minimum (out 0) and maximum (out 1) over
+coming fields.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -73,13 +79,13 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
17: PinSpecification(
name="domain_id",
type_names=["int32"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -87,32 +93,32 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="domain_ids_min",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="domain_ids_max",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -121,29 +127,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_inc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxInc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxInc
+ inputs:
+ An instance of InputsMinMaxInc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxInc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxInc
+ outputs:
+ An instance of OutputsMinMaxInc.
"""
return super().outputs
@@ -170,12 +183,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._domain_id)
@property
- def field(self):
- """Allows to connect field input to the operator.
+ def field(self) -> Input:
+ r"""Allows to connect field input to the operator.
- Parameters
- ----------
- my_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -188,12 +202,13 @@ def field(self):
return self._field
@property
- def domain_id(self):
- """Allows to connect domain_id input to the operator.
+ def domain_id(self) -> Input:
+ r"""Allows to connect domain_id input to the operator.
- Parameters
- ----------
- my_domain_id : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,69 +248,73 @@ def __init__(self, op: Operator):
self._outputs.append(self._domain_ids_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
@property
- def domain_ids_min(self):
- """Allows to get domain_ids_min output of the operator
+ def domain_ids_min(self) -> Output:
+ r"""Allows to get domain_ids_min output of the operator
Returns
- ----------
- my_domain_ids_min : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_domain_ids_min = op.outputs.domain_ids_min()
- """ # noqa: E501
+ """
return self._domain_ids_min
@property
- def domain_ids_max(self):
- """Allows to get domain_ids_max output of the operator
+ def domain_ids_max(self) -> Output:
+ r"""Allows to get domain_ids_max output of the operator
Returns
- ----------
- my_domain_ids_max : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_inc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_domain_ids_max = op.outputs.domain_ids_max()
- """ # noqa: E501
+ """
return self._domain_ids_max
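
A sketch for min_max_inc, which accumulates the component-wise min/max over the fields it receives and also exposes the domain ids of the extrema; my_field is a placeholder Field:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_inc()
>>> my_field = dpf.Field()  # placeholder; connect a real field
>>> op.inputs.field.connect(my_field)
>>> op.inputs.domain_id.connect(0)  # optional, pin 17
>>> f_min = op.outputs.field_min()
>>> f_max = op.outputs.field_max()
>>> ids_min = op.outputs.domain_ids_min()
>>> ids_max = op.outputs.domain_ids_max()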
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py b/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py
index 41287238c1d..7aea51b57ae 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_over_label_fc.py
@@ -4,39 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_over_label_fc(Operator):
- """Create two fields (0 min 1 max) by looping over the fields container
- in input and taking the min/max value by component through all the
- fields having the same id for the label set in input (in pin 1).
- If no label is specified or if the specified label doesn't exist,
- the operation is done over all the fields. The fields out are
- located on the label set in input, so their scoping are the labels
- ids.For each min max value, the label id for one other fields
- container labels is kept and returned in a scoping in pin 2 (min)
- and 3 (max).The field's scoping ids of the value kept in min max
- are also returned in the scopings in pin 4 (min) and 5 (max).
+ r"""Create two fields (0 min 1 max) by looping over the fields container in
+ input and taking the min/max value by component through all the fields
+ having the same id for the label set in input (in pin 1). If no label is
+ specified or if the specified label doesn’t exist, the operation is done
+ over all the fields. The fields out are located on the label set in
+    input, so their scopings are the label ids. For each min max value, the
+ label id for one other fields container labels is kept and returned in a
+    scoping in pin 2 (min) and 3 (max). The field’s scoping ids of the value
+ kept in min max are also returned in the scopings in pin 4 (min) and 5
+ (max).
+
Parameters
----------
- fields_container : FieldsContainer
- label : str
- Label name from the fields container
+ fields_container: FieldsContainer
+ label: str
+        Label name from the fields container
Returns
-------
- field_min : Field
- field_max : Field
- domain_ids_min : Scoping, optional
- domain_ids_max : Scoping, optional
- scoping_ids_min : Scoping
- scoping_ids_max : Scoping
+ field_min: Field
+ field_max: Field
+ domain_ids_min: Scoping, optional
+ domain_ids_max: Scoping, optional
+ scoping_ids_min: Scoping
+ scoping_ids_max: Scoping
Examples
--------
@@ -76,19 +81,18 @@ def __init__(self, fields_container=None, label=None, config=None, server=None):
self.inputs.label.connect(label)
@staticmethod
- def _spec():
- description = """Create two fields (0 min 1 max) by looping over the fields container
- in input and taking the min/max value by component through
- all the fields having the same id for the label set in
- input (in pin 1). If no label is specified or if the
- specified label doesn't exist, the operation is done over
- all the fields. The fields out are located on the label
- set in input, so their scoping are the labels ids.For each
- min max value, the label id for one other fields container
- labels is kept and returned in a scoping in pin 2 (min)
- and 3 (max).The field's scoping ids of the value kept in
- min max are also returned in the scopings in pin 4 (min)
- and 5 (max)."""
+ def _spec() -> Specification:
+ description = r"""Create two fields (0 min 1 max) by looping over the fields container in
+input and taking the min/max value by component through all the fields
+having the same id for the label set in input (in pin 1). If no label is
+specified or if the specified label doesn’t exist, the operation is done
+over all the fields. The fields out are located on the label set in
+input, so their scopings are the label ids. For each min max value, the
+label id for one other fields container labels is kept and returned in a
+scoping in pin 2 (min) and 3 (max). The field’s scoping ids of the value
+kept in min max are also returned in the scopings in pin 4 (min) and 5
+(max).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -96,13 +100,13 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="label",
type_names=["string"],
optional=False,
- document="""Label name from the fields container""",
+                    document=r"""Label name from the fields container""",
),
},
map_output_pin_spec={
@@ -110,44 +114,44 @@ def _spec():
name="field_min",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="field_max",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="domain_ids_min",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="domain_ids_max",
type_names=["scoping"],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="scoping_ids_min",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="scoping_ids_max",
type_names=["scoping"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -156,29 +160,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_max_over_label_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxOverLabelFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxOverLabelFc
+ inputs:
+ An instance of InputsMinMaxOverLabelFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxOverLabelFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxOverLabelFc
+ outputs:
+ An instance of OutputsMinMaxOverLabelFc.
"""
return super().outputs
@@ -207,12 +218,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._label)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,14 +237,15 @@ def fields_container(self):
return self._fields_container
@property
- def label(self):
- """Allows to connect label input to the operator.
+ def label(self) -> Input:
+ r"""Allows to connect label input to the operator.
- Label name from the fields container
+        Label name from the fields container
- Parameters
- ----------
- my_label : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -286,103 +299,109 @@ def __init__(self, op: Operator):
self._outputs.append(self._scoping_ids_max)
@property
- def field_min(self):
- """Allows to get field_min output of the operator
+ def field_min(self) -> Output:
+ r"""Allows to get field_min output of the operator
Returns
- ----------
- my_field_min : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_min = op.outputs.field_min()
- """ # noqa: E501
+ """
return self._field_min
@property
- def field_max(self):
- """Allows to get field_max output of the operator
+ def field_max(self) -> Output:
+ r"""Allows to get field_max output of the operator
Returns
- ----------
- my_field_max : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_max = op.outputs.field_max()
- """ # noqa: E501
+ """
return self._field_max
@property
- def domain_ids_min(self):
- """Allows to get domain_ids_min output of the operator
+ def domain_ids_min(self) -> Output:
+ r"""Allows to get domain_ids_min output of the operator
Returns
- ----------
- my_domain_ids_min : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_domain_ids_min = op.outputs.domain_ids_min()
- """ # noqa: E501
+ """
return self._domain_ids_min
@property
- def domain_ids_max(self):
- """Allows to get domain_ids_max output of the operator
+ def domain_ids_max(self) -> Output:
+ r"""Allows to get domain_ids_max output of the operator
Returns
- ----------
- my_domain_ids_max : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_domain_ids_max = op.outputs.domain_ids_max()
- """ # noqa: E501
+ """
return self._domain_ids_max
@property
- def scoping_ids_min(self):
- """Allows to get scoping_ids_min output of the operator
+ def scoping_ids_min(self) -> Output:
+ r"""Allows to get scoping_ids_min output of the operator
Returns
- ----------
- my_scoping_ids_min : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping_ids_min = op.outputs.scoping_ids_min()
- """ # noqa: E501
+ """
return self._scoping_ids_min
@property
- def scoping_ids_max(self):
- """Allows to get scoping_ids_max output of the operator
+ def scoping_ids_max(self) -> Output:
+ r"""Allows to get scoping_ids_max output of the operator
Returns
- ----------
- my_scoping_ids_max : Scoping
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_scoping_ids_max = op.outputs.scoping_ids_max()
- """ # noqa: E501
+ """
return self._scoping_ids_max
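
A sketch for min_max_over_label_fc; "time" is only an example of a label commonly carried by a fields container, and the input container is a placeholder:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_label_fc()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())  # placeholder; connect real result data
>>> op.inputs.label.connect("time")  # label name from the fields container, pin 1
>>> field_min = op.outputs.field_min()
>>> field_max = op.outputs.field_max()
>>> scoping_ids_max = op.outputs.scoping_ids_max()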
diff --git a/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py
index 08554c92be6..0c55ee32335 100644
--- a/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/min_max_over_time_by_entity.py
@@ -4,33 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_max_over_time_by_entity(Operator):
- """Evaluates minimum, maximum over time/frequency and returns those min
- max as well as the time/freq where they occurred
+ r"""Evaluates minimum, maximum over time/frequency and returns those min max
+ as well as the time/freq where they occurred
+
Parameters
----------
- fields_container : FieldsContainer
- compute_absolute_value : bool, optional
- Calculate the absolute value of field
- entities before computing the
- min/max.
- compute_amplitude : bool, optional
+ fields_container: FieldsContainer
+ compute_absolute_value: bool, optional
+ Calculate the absolute value of field entities before computing the min/max.
+ compute_amplitude: bool, optional
Do calculate amplitude.
Returns
-------
- min : FieldsContainer
- max : FieldsContainer
- time_freq_of_min : FieldsContainer
- time_freq_of_max : FieldsContainer
+ min: FieldsContainer
+ max: FieldsContainer
+ time_freq_of_min: FieldsContainer
+ time_freq_of_max: FieldsContainer
Examples
--------
@@ -82,9 +85,10 @@ def __init__(
self.inputs.compute_amplitude.connect(compute_amplitude)
@staticmethod
- def _spec():
- description = """Evaluates minimum, maximum over time/frequency and returns those min
- max as well as the time/freq where they occurred"""
+ def _spec() -> Specification:
+ description = r"""Evaluates minimum, maximum over time/frequency and returns those min max
+as well as the time/freq where they occurred
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -92,21 +96,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="compute_absolute_value",
type_names=["bool"],
optional=True,
- document="""Calculate the absolute value of field
- entities before computing the
- min/max.""",
+ document=r"""Calculate the absolute value of field entities before computing the min/max.""",
),
4: PinSpecification(
name="compute_amplitude",
type_names=["bool"],
optional=True,
- document="""Do calculate amplitude.""",
+ document=r"""Do calculate amplitude.""",
),
},
map_output_pin_spec={
@@ -114,32 +116,32 @@ def _spec():
name="min",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="max",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="time_freq_of_min",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="time_freq_of_max",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -148,31 +150,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="min_max_over_time_by_entity", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinMaxOverTimeByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinMaxOverTimeByEntity
+ inputs:
+ An instance of InputsMinMaxOverTimeByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinMaxOverTimeByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinMaxOverTimeByEntity
+ outputs:
+ An instance of OutputsMinMaxOverTimeByEntity.
"""
return super().outputs
@@ -209,12 +218,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_amplitude)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -227,16 +237,15 @@ def fields_container(self):
return self._fields_container
@property
- def compute_absolute_value(self):
- """Allows to connect compute_absolute_value input to the operator.
+ def compute_absolute_value(self) -> Input:
+ r"""Allows to connect compute_absolute_value input to the operator.
- Calculate the absolute value of field
- entities before computing the
- min/max.
+ Calculate the absolute value of field entities before computing the min/max.
- Parameters
- ----------
- my_compute_absolute_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -249,14 +258,15 @@ def compute_absolute_value(self):
return self._compute_absolute_value
@property
- def compute_amplitude(self):
- """Allows to connect compute_amplitude input to the operator.
+ def compute_amplitude(self) -> Input:
+ r"""Allows to connect compute_amplitude input to the operator.
Do calculate amplitude.
- Parameters
- ----------
- my_compute_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -300,69 +310,73 @@ def __init__(self, op: Operator):
self._outputs.append(self._time_freq_of_max)
@property
- def min(self):
- """Allows to get min output of the operator
+ def min(self) -> Output:
+ r"""Allows to get min output of the operator
Returns
- ----------
- my_min : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_min = op.outputs.min()
- """ # noqa: E501
+ """
return self._min
@property
- def max(self):
- """Allows to get max output of the operator
+ def max(self) -> Output:
+ r"""Allows to get max output of the operator
Returns
- ----------
- my_max : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_max = op.outputs.max()
- """ # noqa: E501
+ """
return self._max
@property
- def time_freq_of_min(self):
- """Allows to get time_freq_of_min output of the operator
+ def time_freq_of_min(self) -> Output:
+ r"""Allows to get time_freq_of_min output of the operator
Returns
- ----------
- my_time_freq_of_min : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_of_min = op.outputs.time_freq_of_min()
- """ # noqa: E501
+ """
return self._time_freq_of_min
@property
- def time_freq_of_max(self):
- """Allows to get time_freq_of_max output of the operator
+ def time_freq_of_max(self) -> Output:
+ r"""Allows to get time_freq_of_max output of the operator
Returns
- ----------
- my_time_freq_of_max : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_time_freq_of_max = op.outputs.time_freq_of_max()
- """ # noqa: E501
+ """
return self._time_freq_of_max
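
A sketch for min_max_over_time_by_entity, which returns both the extrema and the time/frequency at which they occur; the input container is a placeholder:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_max_over_time_by_entity()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())  # placeholder transient result
>>> op.inputs.compute_absolute_value.connect(True)  # optional, pin 3
>>> max_fc = op.outputs.max()
>>> time_of_max = op.outputs.time_freq_of_max()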
diff --git a/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py b/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py
index b85352e9214..e62ea14acf7 100644
--- a/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/min_over_time_by_entity.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class min_over_time_by_entity(Operator):
- """Evaluates minimum over time/frequency.
+ r"""Evaluates minimum over time/frequency.
+
Parameters
----------
- fields_container : FieldsContainer
- abs_value : bool, optional
+ fields_container: FieldsContainer
+ abs_value: bool, optional
Should use absolute value.
- compute_amplitude : bool, optional
+ compute_amplitude: bool, optional
Do calculate amplitude.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,8 +76,9 @@ def __init__(
self.inputs.compute_amplitude.connect(compute_amplitude)
@staticmethod
- def _spec():
- description = """Evaluates minimum over time/frequency."""
+ def _spec() -> Specification:
+ description = r"""Evaluates minimum over time/frequency.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
4: PinSpecification(
name="compute_amplitude",
type_names=["bool"],
optional=True,
- document="""Do calculate amplitude.""",
+ document=r"""Do calculate amplitude.""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="min_over_time_by_entity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsMinOverTimeByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsMinOverTimeByEntity
+ inputs:
+ An instance of InputsMinOverTimeByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsMinOverTimeByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsMinOverTimeByEntity
+ outputs:
+ An instance of OutputsMinOverTimeByEntity.
"""
return super().outputs
@@ -173,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_amplitude)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -191,14 +205,15 @@ def fields_container(self):
return self._fields_container
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +226,15 @@ def abs_value(self):
return self._abs_value
@property
- def compute_amplitude(self):
- """Allows to connect compute_amplitude input to the operator.
+ def compute_amplitude(self) -> Input:
+ r"""Allows to connect compute_amplitude input to the operator.
Do calculate amplitude.
- Parameters
- ----------
- my_compute_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_over_time_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
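
A sketch for min_over_time_by_entity; both boolean pins are optional and connected here only to illustrate their names:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.min_over_time_by_entity()
>>> op.inputs.fields_container.connect(dpf.FieldsContainer())  # placeholder transient result
>>> op.inputs.abs_value.connect(False)          # optional, pin 3
>>> op.inputs.compute_amplitude.connect(False)  # optional, pin 4
>>> min_fc = op.outputs.fields_container()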
diff --git a/src/ansys/dpf/core/operators/min_max/phase_of_max.py b/src/ansys/dpf/core/operators/min_max/phase_of_max.py
index bdc8a87a5b2..708aeb36483 100644
--- a/src/ansys/dpf/core/operators/min_max/phase_of_max.py
+++ b/src/ansys/dpf/core/operators/min_max/phase_of_max.py
@@ -4,28 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class phase_of_max(Operator):
- """Evaluates phase of maximum.
+ r"""Evaluates phase of maximum.
+
Parameters
----------
- real_field : Field
- imaginary_field : Field
- abs_value : bool, optional
+ real_field: Field
+ imaginary_field: Field
+ abs_value: bool, optional
Should use absolute value.
- phase_increment : float
+ phase_increment: float
Phase increment.
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -78,8 +83,9 @@ def __init__(
self.inputs.phase_increment.connect(phase_increment)
@staticmethod
- def _spec():
- description = """Evaluates phase of maximum."""
+ def _spec() -> Specification:
+ description = r"""Evaluates phase of maximum.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -87,25 +93,25 @@ def _spec():
name="real_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="imaginary_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
3: PinSpecification(
name="phase_increment",
type_names=["double"],
optional=False,
- document="""Phase increment.""",
+ document=r"""Phase increment.""",
),
},
map_output_pin_spec={
@@ -113,14 +119,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -129,29 +135,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="phase_of_max", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsPhaseOfMax:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsPhaseOfMax
+ inputs:
+ An instance of InputsPhaseOfMax.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsPhaseOfMax:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsPhaseOfMax
+ outputs:
+ An instance of OutputsPhaseOfMax.
"""
return super().outputs
@@ -186,12 +199,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phase_increment)
@property
- def real_field(self):
- """Allows to connect real_field input to the operator.
+ def real_field(self) -> Input:
+ r"""Allows to connect real_field input to the operator.
- Parameters
- ----------
- my_real_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -204,12 +218,13 @@ def real_field(self):
return self._real_field
@property
- def imaginary_field(self):
- """Allows to connect imaginary_field input to the operator.
+ def imaginary_field(self) -> Input:
+ r"""Allows to connect imaginary_field input to the operator.
- Parameters
- ----------
- my_imaginary_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -222,14 +237,15 @@ def imaginary_field(self):
return self._imaginary_field
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -242,14 +258,15 @@ def abs_value(self):
return self._abs_value
@property
- def phase_increment(self):
- """Allows to connect phase_increment input to the operator.
+ def phase_increment(self) -> Input:
+ r"""Allows to connect phase_increment input to the operator.
Phase increment.
- Parameters
- ----------
- my_phase_increment : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -280,18 +297,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.phase_of_max()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
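
A sketch for phase_of_max; the real and imaginary fields are placeholders for the two parts of a harmonic result, and the phase_increment unit is assumed to be degrees:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.phase_of_max()
>>> op.inputs.real_field.connect(dpf.Field())       # placeholder real part, pin 0
>>> op.inputs.imaginary_field.connect(dpf.Field())  # placeholder imaginary part, pin 1
>>> op.inputs.phase_increment.connect(10.0)         # pin 3, assumed to be in degrees
>>> phase_at_max = op.outputs.field()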
diff --git a/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py b/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py
index d9f93a5d334..83430b5d8f8 100644
--- a/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/time_of_max_by_entity.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_of_max_by_entity(Operator):
- """Evaluates time/frequency of maximum.
+ r"""Evaluates time/frequency of maximum.
+
Parameters
----------
- fields_container : FieldsContainer
- abs_value : bool, optional
+ fields_container: FieldsContainer
+ abs_value: bool, optional
Should use absolute value.
- compute_amplitude : bool, optional
+ compute_amplitude: bool, optional
Do calculate amplitude.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,8 +76,9 @@ def __init__(
self.inputs.compute_amplitude.connect(compute_amplitude)
@staticmethod
- def _spec():
- description = """Evaluates time/frequency of maximum."""
+ def _spec() -> Specification:
+ description = r"""Evaluates time/frequency of maximum.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
4: PinSpecification(
name="compute_amplitude",
type_names=["bool"],
optional=True,
- document="""Do calculate amplitude.""",
+ document=r"""Do calculate amplitude.""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="time_of_max_by_entity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeOfMaxByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeOfMaxByEntity
+ inputs:
+ An instance of InputsTimeOfMaxByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeOfMaxByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeOfMaxByEntity
+ outputs:
+ An instance of OutputsTimeOfMaxByEntity.
"""
return super().outputs
@@ -173,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_amplitude)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -191,14 +205,15 @@ def fields_container(self):
return self._fields_container
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +226,15 @@ def abs_value(self):
return self._abs_value
@property
- def compute_amplitude(self):
- """Allows to connect compute_amplitude input to the operator.
+ def compute_amplitude(self) -> Input:
+ r"""Allows to connect compute_amplitude input to the operator.
Do calculate amplitude.
- Parameters
- ----------
- my_compute_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.time_of_max_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py b/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py
index 4062cb05be8..795d33892d5 100644
--- a/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py
+++ b/src/ansys/dpf/core/operators/min_max/time_of_min_by_entity.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class time_of_min_by_entity(Operator):
- """Evaluates time/frequency of minimum.
+ r"""Evaluates time/frequency of minimum.
+
Parameters
----------
- fields_container : FieldsContainer
- abs_value : bool, optional
+ fields_container: FieldsContainer
+ abs_value: bool, optional
Should use absolute value.
- compute_amplitude : bool, optional
+ compute_amplitude: bool, optional
Do calculate amplitude.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -71,8 +76,9 @@ def __init__(
self.inputs.compute_amplitude.connect(compute_amplitude)
@staticmethod
- def _spec():
- description = """Evaluates time/frequency of minimum."""
+ def _spec() -> Specification:
+ description = r"""Evaluates time/frequency of minimum.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -80,19 +86,19 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="abs_value",
type_names=["bool"],
optional=True,
- document="""Should use absolute value.""",
+ document=r"""Should use absolute value.""",
),
4: PinSpecification(
name="compute_amplitude",
type_names=["bool"],
optional=True,
- document="""Do calculate amplitude.""",
+ document=r"""Do calculate amplitude.""",
),
},
map_output_pin_spec={
@@ -100,14 +106,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -116,29 +122,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="time_of_min_by_entity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsTimeOfMinByEntity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsTimeOfMinByEntity
+ inputs:
+ An instance of InputsTimeOfMinByEntity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsTimeOfMinByEntity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsTimeOfMinByEntity
+ outputs:
+ An instance of OutputsTimeOfMinByEntity.
"""
return super().outputs
@@ -173,12 +186,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._compute_amplitude)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -191,14 +205,15 @@ def fields_container(self):
return self._fields_container
@property
- def abs_value(self):
- """Allows to connect abs_value input to the operator.
+ def abs_value(self) -> Input:
+ r"""Allows to connect abs_value input to the operator.
Should use absolute value.
- Parameters
- ----------
- my_abs_value : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -211,14 +226,15 @@ def abs_value(self):
return self._abs_value
@property
- def compute_amplitude(self):
- """Allows to connect compute_amplitude input to the operator.
+ def compute_amplitude(self) -> Input:
+ r"""Allows to connect compute_amplitude input to the operator.
Do calculate amplitude.
- Parameters
- ----------
- my_compute_amplitude : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,18 +267,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.min_max.time_of_min_by_entity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/__init__.py b/src/ansys/dpf/core/operators/result/__init__.py
index afdfd316d8d..a36ef4cc0fb 100644
--- a/src/ansys/dpf/core/operators/result/__init__.py
+++ b/src/ansys/dpf/core/operators/result/__init__.py
@@ -247,6 +247,7 @@
from .poynting_vector_surface import poynting_vector_surface
from .pressure import pressure
from .pres_to_field import pres_to_field
+from .pretension import pretension
from .prns_to_field import prns_to_field
from .raw_displacement import raw_displacement
from .raw_reaction_force import raw_reaction_force
diff --git a/src/ansys/dpf/core/operators/result/acceleration.py b/src/ansys/dpf/core/operators/result/acceleration.py
index dcc58d02e99..10fa03b5aa7 100644
--- a/src/ansys/dpf/core/operators/result/acceleration.py
+++ b/src/ansys/dpf/core/operators/result/acceleration.py
@@ -4,83 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class acceleration(Operator):
- """Read/compute nodal accelerations by calling the readers defined by the
+ r"""Read/compute nodal accelerations by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -175,9 +142,10 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """Read/compute nodal accelerations by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal accelerations by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,105 +160,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
},
map_output_pin_spec={
@@ -298,14 +228,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -314,29 +244,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="A", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAcceleration:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAcceleration
+ inputs:
+ An instance of InputsAcceleration.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAcceleration:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAcceleration
+ outputs:
+ An instance of OutputsAcceleration.
"""
return super().outputs
@@ -403,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -437,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -467,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -488,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -509,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -530,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -551,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -572,18 +487,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +508,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,17 +529,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -640,15 +550,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -679,18 +589,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.acceleration()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/acceleration_X.py b/src/ansys/dpf/core/operators/result/acceleration_X.py
index 3fd8a31ca16..87746bf448e 100644
--- a/src/ansys/dpf/core/operators/result/acceleration_X.py
+++ b/src/ansys/dpf/core/operators/result/acceleration_X.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class acceleration_X(Operator):
- """Read/compute nodal accelerations X component of the vector (1st
+ r"""Read/compute nodal accelerations X component of the vector (1st
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal accelerations X component of the vector (1st
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal accelerations X component of the vector (1st
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="AX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccelerationX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccelerationX
+ inputs:
+ An instance of InputsAccelerationX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccelerationX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccelerationX
+ outputs:
+ An instance of OutputsAccelerationX.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.acceleration_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/acceleration_Y.py b/src/ansys/dpf/core/operators/result/acceleration_Y.py
index 5a46a1b6efe..6c8ef68fa60 100644
--- a/src/ansys/dpf/core/operators/result/acceleration_Y.py
+++ b/src/ansys/dpf/core/operators/result/acceleration_Y.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class acceleration_Y(Operator):
- """Read/compute nodal accelerations Y component of the vector (2nd
+ r"""Read/compute nodal accelerations Y component of the vector (2nd
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal accelerations Y component of the vector (2nd
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal accelerations Y component of the vector (2nd
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="AY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccelerationY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccelerationY
+ inputs:
+ An instance of InputsAccelerationY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccelerationY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccelerationY
+ outputs:
+ An instance of OutputsAccelerationY.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.acceleration_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
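The acceleration_Y pins documented above (time_scoping, mesh_scoping, data_sources, read_cyclic, and the fields_container output) all follow the generic result-operator layout. A minimal usage sketch in the same doctest style, assuming a hypothetical result file "model.rst" on disk:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("model.rst")  # hypothetical result file path
>>> op = dpf.operators.result.acceleration_Y()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.time_scoping.connect([1])  # first time/freq set id
>>> fields = op.outputs.fields_container()  # evaluates the operator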
diff --git a/src/ansys/dpf/core/operators/result/acceleration_Z.py b/src/ansys/dpf/core/operators/result/acceleration_Z.py
index 50519bb33a1..aa1df8bd5d4 100644
--- a/src/ansys/dpf/core/operators/result/acceleration_Z.py
+++ b/src/ansys/dpf/core/operators/result/acceleration_Z.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class acceleration_Z(Operator):
- """Read/compute nodal accelerations Z component of the vector (3rd
+ r"""Read/compute nodal accelerations Z component of the vector (3rd
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal accelerations Z component of the vector (3rd
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal accelerations Z component of the vector (3rd
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="AZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccelerationZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccelerationZ
+ inputs:
+ An instance of InputsAccelerationZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccelerationZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccelerationZ
+ outputs:
+ An instance of OutputsAccelerationZ.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.acceleration_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
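The mesh_scoping and read_cyclic pins of acceleration_Z behave as described in the docstrings above: the output fields are scoped on the requested node or element IDs, and read_cyclic selects how cyclic symmetry is handled. A hedged sketch, assuming a hypothetical cyclic model "model.rst" whose mesh contains node IDs 1, 2 and 3:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.acceleration_Z()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> node_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
>>> op.inputs.mesh_scoping.connect(node_scoping)  # output fields scoped on these node ids
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion
>>> fields = op.outputs.fields_container()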
diff --git a/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py b/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py
index efc42c3ce33..5c23789990f 100644
--- a/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py
+++ b/src/ansys/dpf/core/operators/result/accu_eqv_creep_strain.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accu_eqv_creep_strain(Operator):
- """Read/compute element nodal accumulated equivalent creep strain by
- calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal accumulated equivalent creep strain by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal accumulated equivalent creep strain by
- calling the readers defined by the datasources. Regarding
- the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal accumulated equivalent creep strain by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENL_CREQ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccuEqvCreepStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccuEqvCreepStrain
+ inputs:
+ An instance of InputsAccuEqvCreepStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccuEqvCreepStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccuEqvCreepStrain
+ outputs:
+ An instance of OutputsAccuEqvCreepStrain.
"""
return super().outputs
@@ -435,28 +360,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,24 +381,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,15 +402,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,15 +423,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,15 +444,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +465,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +486,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +507,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +528,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,20 +549,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -672,21 +570,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,18 +611,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.accu_eqv_creep_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
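For elemental nodal results such as accu_eqv_creep_strain, the requested_location, split_shells and shell_layer pins documented above control where values are reported and how shell layers are handled. A hedged sketch, assuming a hypothetical result file "model.rst" that contains creep results:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.accu_eqv_creep_strain()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> op.inputs.requested_location.connect(dpf.locations.elemental)  # Nodal, Elemental or ElementalNodal
>>> op.inputs.split_shells.connect(True)  # split shell and solid elemental nodal results
>>> fields = op.outputs.fields_container()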
diff --git a/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py b/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py
index d3ef9061ad7..5d1216c83c0 100644
--- a/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py
+++ b/src/ansys/dpf/core/operators/result/accu_eqv_plastic_strain.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class accu_eqv_plastic_strain(Operator):
- """Read/compute element nodal accumulated equivalent plastic strain by
- calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal accumulated equivalent plastic strain by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal accumulated equivalent plastic strain by
- calling the readers defined by the datasources. Regarding
- the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal accumulated equivalent plastic strain by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true, the field is rotated to the global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location: Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if the split_shells pin is set to true, we choose one of the shell layers for shell elements. If the split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENL_EPEQ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAccuEqvPlasticStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAccuEqvPlasticStrain
+ inputs:
+ An instance of InputsAccuEqvPlasticStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAccuEqvPlasticStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAccuEqvPlasticStrain
+ outputs:
+ An instance of OutputsAccuEqvPlasticStrain.
"""
return super().outputs
@@ -435,28 +360,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,24 +381,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,15 +402,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,15 +423,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,15 +444,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +465,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true, the field is rotated to the global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +486,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +507,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location: Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +528,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,20 +549,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -672,21 +570,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if the split_shells pin is set to true, we choose one of the shell layers for shell elements. If the split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,18 +611,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.accu_eqv_plastic_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
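A minimal usage sketch for the accu_eqv_plastic_strain operator documented above; the result file path is hypothetical, and the pin names (data_sources, requested_location, split_shells) are taken from the specification in this hunk.

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("model.rst")  # hypothetical result file path
>>> op = dpf.operators.result.accu_eqv_plastic_strain()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.requested_location.connect("Nodal")  # Nodal, Elemental or ElementalNodal
>>> op.inputs.split_shells.connect(True)  # split shell and solid results into separate fields
>>> fc = op.outputs.fields_container()  # evaluating the output runs the operator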
diff --git a/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py b/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py
index 85500b1c0e1..37d1e47ff4f 100644
--- a/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py
+++ b/src/ansys/dpf/core/operators/result/add_rigid_body_motion.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add_rigid_body_motion(Operator):
- """Adds a given rigid translation, center and rotation from a
- displacement field. The rotation is given in terms of rotations
- angles. Note that the displacement field has to be in the global
- coordinate system
+ r"""Adds a given rigid translation, center and rotation from a displacement
+ field. The rotation is given in terms of rotation angles. Note that the
+ displacement field has to be in the global coordinate system
+
Parameters
----------
- displacement_field : Field
- translation_field : Field
- rotation_field : Field
- center_field : Field
- mesh : MeshedRegion, optional
- Default is the mesh in the support
+ displacement_field: Field
+ translation_field: Field
+ rotation_field: Field
+ center_field: Field
+ mesh: MeshedRegion, optional
+ default is the mesh in the support
Returns
-------
- field : Field
+ field: Field
Examples
--------
@@ -87,11 +91,11 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Adds a given rigid translation, center and rotation from a
- displacement field. The rotation is given in terms of
- rotations angles. Note that the displacement field has to
- be in the global coordinate system"""
+ def _spec() -> Specification:
+ description = r"""Adds a given rigid translation, center and rotation from a displacement
+field. The rotation is given in terms of rotation angles. Note that the
+displacement field has to be in the global coordinate system
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -99,31 +103,31 @@ def _spec():
name="displacement_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="translation_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="rotation_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="center_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Default is the mesh in the support""",
+ document=r"""default is the mesh in the support""",
),
},
map_output_pin_spec={
@@ -131,14 +135,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -147,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="RigidBodyAddition", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAddRigidBodyMotion:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAddRigidBodyMotion
+ inputs:
+ An instance of InputsAddRigidBodyMotion.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAddRigidBodyMotion:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAddRigidBodyMotion
+ outputs:
+ An instance of OutputsAddRigidBodyMotion.
"""
return super().outputs
@@ -216,12 +227,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def displacement_field(self):
- """Allows to connect displacement_field input to the operator.
+ def displacement_field(self) -> Input:
+ r"""Allows to connect displacement_field input to the operator.
- Parameters
- ----------
- my_displacement_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -234,12 +246,13 @@ def displacement_field(self):
return self._displacement_field
@property
- def translation_field(self):
- """Allows to connect translation_field input to the operator.
+ def translation_field(self) -> Input:
+ r"""Allows to connect translation_field input to the operator.
- Parameters
- ----------
- my_translation_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,12 +265,13 @@ def translation_field(self):
return self._translation_field
@property
- def rotation_field(self):
- """Allows to connect rotation_field input to the operator.
+ def rotation_field(self) -> Input:
+ r"""Allows to connect rotation_field input to the operator.
- Parameters
- ----------
- my_rotation_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,12 +284,13 @@ def rotation_field(self):
return self._rotation_field
@property
- def center_field(self):
- """Allows to connect center_field input to the operator.
+ def center_field(self) -> Input:
+ r"""Allows to connect center_field input to the operator.
- Parameters
- ----------
- my_center_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -288,14 +303,15 @@ def center_field(self):
return self._center_field
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Default is the mesh in the support
+ default is the mesh in the support
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -326,18 +342,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.add_rigid_body_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
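A minimal sketch of driving the add_rigid_body_motion operator above, assuming my_displacement_field, my_translation_field, my_rotation_field and my_center_field already exist as DPF Field objects (hypothetical names); the pin names come from the specification in this hunk.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.add_rigid_body_motion()
>>> op.inputs.displacement_field.connect(my_displacement_field)  # displacement in the global coordinate system
>>> op.inputs.translation_field.connect(my_translation_field)  # rigid translation
>>> op.inputs.rotation_field.connect(my_rotation_field)  # rotation angles
>>> op.inputs.center_field.connect(my_center_field)  # rotation center
>>> out_field = op.outputs.field()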
diff --git a/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py b/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py
index 306aacd8b3e..9129c8c400e 100644
--- a/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py
+++ b/src/ansys/dpf/core/operators/result/add_rigid_body_motion_fc.py
@@ -4,31 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class add_rigid_body_motion_fc(Operator):
- """Adds a given rigid translation, center and rotation from a
- displacement field. The rotation is given in terms of rotations
- angles. Note that the displacement field has to be in the global
- coordinate system
+ r"""Adds a given rigid translation, center and rotation from a displacement
+ field. The rotation is given in terms of rotation angles. Note that the
+ displacement field has to be in the global coordinate system
+
Parameters
----------
- fields_container : FieldsContainer
- translation_field : Field
- rotation_field : Field
- center_field : Field
- mesh : MeshedRegion, optional
- Default is the mesh in the support
+ fields_container: FieldsContainer
+ translation_field: Field
+ rotation_field: Field
+ center_field: Field
+ mesh: MeshedRegion, optional
+ default is the mesh in the support
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -87,11 +91,11 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Adds a given rigid translation, center and rotation from a
- displacement field. The rotation is given in terms of
- rotations angles. Note that the displacement field has to
- be in the global coordinate system"""
+ def _spec() -> Specification:
+ description = r"""Adds a given rigid translation, center and rotation from a displacement
+field. The rotation is given in terms of rotation angles. Note that the
+displacement field has to be in the global coordinate system
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -99,31 +103,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="translation_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="rotation_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="center_field",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""Default is the mesh in the support""",
+ document=r"""default is the mesh in the support""",
),
},
map_output_pin_spec={
@@ -131,14 +135,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -147,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="RigidBodyAddition_fc", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsAddRigidBodyMotionFc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsAddRigidBodyMotionFc
+ inputs:
+ An instance of InputsAddRigidBodyMotionFc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsAddRigidBodyMotionFc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsAddRigidBodyMotionFc
+ outputs:
+ An instance of OutputsAddRigidBodyMotionFc.
"""
return super().outputs
@@ -216,12 +227,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -234,12 +246,13 @@ def fields_container(self):
return self._fields_container
@property
- def translation_field(self):
- """Allows to connect translation_field input to the operator.
+ def translation_field(self) -> Input:
+ r"""Allows to connect translation_field input to the operator.
- Parameters
- ----------
- my_translation_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,12 +265,13 @@ def translation_field(self):
return self._translation_field
@property
- def rotation_field(self):
- """Allows to connect rotation_field input to the operator.
+ def rotation_field(self) -> Input:
+ r"""Allows to connect rotation_field input to the operator.
- Parameters
- ----------
- my_rotation_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -270,12 +284,13 @@ def rotation_field(self):
return self._rotation_field
@property
- def center_field(self):
- """Allows to connect center_field input to the operator.
+ def center_field(self) -> Input:
+ r"""Allows to connect center_field input to the operator.
- Parameters
- ----------
- my_center_field : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -288,14 +303,15 @@ def center_field(self):
return self._center_field
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Default is the mesh in the support
+ default is the mesh in the support
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -328,18 +344,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.add_rigid_body_motion_fc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
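The _fc variant above takes a whole fields container on pin 0, so it can be chained directly behind another operator. A sketch assuming disp_op is a displacement operator (for instance dpf.operators.result.displacement()) already fed with the same data sources, and that the translation, rotation and center fields exist as in the previous sketch.

>>> op = dpf.operators.result.add_rigid_body_motion_fc()
>>> op.inputs.fields_container.connect(disp_op.outputs.fields_container)  # chain one operator output into another
>>> op.inputs.translation_field.connect(my_translation_field)
>>> op.inputs.rotation_field.connect(my_rotation_field)
>>> op.inputs.center_field.connect(my_center_field)
>>> fc = op.outputs.fields_container()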
diff --git a/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py b/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py
index 90ffa9aac82..a1810dee907 100644
--- a/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py
+++ b/src/ansys/dpf/core/operators/result/artificial_hourglass_energy.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class artificial_hourglass_energy(Operator):
- """Read/compute artificial hourglass energy by calling the readers
- defined by the datasources.
+ r"""Read/compute artificial hourglass energy by calling the readers defined
+ by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true, the field is rotated to the global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute artificial hourglass energy by calling the readers
- defined by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute artificial hourglass energy by calling the readers defined
+by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true, the field is rotated to the global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENG_AHO", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsArtificialHourglassEnergy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsArtificialHourglassEnergy
+ inputs:
+ An instance of InputsArtificialHourglassEnergy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsArtificialHourglassEnergy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsArtificialHourglassEnergy
+ outputs:
+ An instance of OutputsArtificialHourglassEnergy.
"""
return super().outputs
@@ -321,28 +276,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -355,24 +297,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,15 +318,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -406,15 +339,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +360,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +381,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true, the field is rotated to the global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,15 +402,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -510,18 +443,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.artificial_hourglass_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
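A minimal sketch for the artificial_hourglass_energy operator above, illustrating the time_scoping pin described in its docstring: ints are interpreted as time/freq set ids, doubles as time/freq values, and a Field scoped on "TimeFreq_steps" selects specific load steps. The result file path is hypothetical.

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("model.rst")  # hypothetical result file path
>>> op = dpf.operators.result.artificial_hourglass_energy()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.time_scoping.connect([1, 2])  # time/freq set ids
>>> fc = op.outputs.fields_container()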
diff --git a/src/ansys/dpf/core/operators/result/beam_axial_force.py b/src/ansys/dpf/core/operators/result/beam_axial_force.py
index e8c8fdb39f8..6ae32926f43 100644
--- a/src/ansys/dpf/core/operators/result/beam_axial_force.py
+++ b/src/ansys/dpf/core/operators/result/beam_axial_force.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_axial_force(Operator):
- """Read Beam Axial Force (LSDyna) by calling the readers defined by the
+ r"""Read Beam Axial Force (LSDyna) by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam Axial Force (LSDyna) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam Axial Force (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_N", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamAxialForce:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamAxialForce
+ inputs:
+ An instance of InputsBeamAxialForce.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamAxialForce:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamAxialForce
+ outputs:
+ An instance of OutputsBeamAxialForce.
"""
return super().outputs
@@ -257,28 +234,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -291,14 +255,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -311,15 +276,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -332,15 +297,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -353,16 +318,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -393,18 +357,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_axial_force()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
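A minimal sketch for the beam_axial_force operator above. The LSDyna file path, element ids and unit system value are hypothetical; the mesh_scoping pin expects an elements scoping and the unit_system pin accepts an int id, a semicolon-separated base-unit string or a UnitSystem instance, as stated in the specification.

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources()
>>> ds.set_result_file_path("d3plot", "d3plot")  # hypothetical LSDyna result file and key
>>> op = dpf.operators.result.beam_axial_force()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.elemental))  # elements scoping
>>> op.inputs.unit_system.connect(0)  # hypothetical unit system id; a base-unit string or UnitSystem also works
>>> fc = op.outputs.fields_container()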
diff --git a/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py b/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py
index dd224c191e8..a7c921360d2 100644
--- a/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py
+++ b/src/ansys/dpf/core/operators/result/beam_axial_plastic_strain.py
@@ -4,55 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_axial_plastic_strain(Operator):
- """Read Beam Axial Plastic strain (LSDyna) by calling the readers defined
+ r"""Read Beam Axial Plastic strain (LSDyna) by calling the readers defined
by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- integration_point : int, optional
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ integration_point: int, optional
+ integration point where the result will be read from. Default value: 0 (first integration point).
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -117,9 +102,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam Axial Plastic strain (LSDyna) by calling the readers defined
- by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam Axial Plastic strain (LSDyna) by calling the readers defined
+by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,49 +120,31 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="integration_point",
type_names=["int32"],
optional=True,
- document="""Integration point where the result will be
- read from. default value: 0 (first
- integration point).""",
+ document=r"""integration point where the result will be read from. Default value: 0 (first integration point).""",
),
50: PinSpecification(
name="unit_system",
@@ -186,9 +154,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -196,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -212,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_EPPL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamAxialPlasticStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamAxialPlasticStrain
+ inputs:
+ An instance of InputsBeamAxialPlasticStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamAxialPlasticStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamAxialPlasticStrain
+ outputs:
+ An instance of OutputsBeamAxialPlasticStrain.
"""
return super().outputs
@@ -289,28 +262,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,14 +283,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,15 +304,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -364,15 +325,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,16 +346,15 @@ def data_sources(self):
return self._data_sources
@property
- def integration_point(self):
- """Allows to connect integration_point input to the operator.
+ def integration_point(self) -> Input:
+ r"""Allows to connect integration_point input to the operator.
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
+ integration point where the result will be read from. Default value: 0 (first integration point).
- Parameters
- ----------
- my_integration_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -407,16 +367,15 @@ def integration_point(self):
return self._integration_point
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,18 +408,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_axial_plastic_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
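
Because integration_point defaults to 0 (the first integration point), reading another point only requires setting pin 6. A sketch using the generated constructor keyword arguments, assuming the same placeholder result file and a model with at least two integration points per beam:

    from ansys.dpf import core as dpf

    data_sources = dpf.DataSources(r"path/to/d3plot")

    # The autogenerated __init__ accepts the documented pins as keyword
    # arguments and connects them; integration_point=1 selects the second point.
    op = dpf.operators.result.beam_axial_plastic_strain(
        data_sources=data_sources,
        integration_point=1,
    )

    fields = op.outputs.fields_container()
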
diff --git a/src/ansys/dpf/core/operators/result/beam_axial_stress.py b/src/ansys/dpf/core/operators/result/beam_axial_stress.py
index fcfa7c753a1..f2c9fc3ec0b 100644
--- a/src/ansys/dpf/core/operators/result/beam_axial_stress.py
+++ b/src/ansys/dpf/core/operators/result/beam_axial_stress.py
@@ -4,55 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_axial_stress(Operator):
- """Read Beam Axial Stress (LSDyna) by calling the readers defined by the
+ r"""Read Beam Axial Stress (LSDyna) by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- integration_point : int, optional
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ integration_point: int, optional
+ integration point where the result will be read from. Default value: 0 (first integration point).
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -117,9 +102,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam Axial Stress (LSDyna) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam Axial Stress (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,49 +120,31 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="integration_point",
type_names=["int32"],
optional=True,
- document="""Integration point where the result will be
- read from. default value: 0 (first
- integration point).""",
+ document=r"""integration point where the result will be read from. Default value: 0 (first integration point).""",
),
50: PinSpecification(
name="unit_system",
@@ -186,9 +154,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -196,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -212,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_SN", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamAxialStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamAxialStress
+ inputs:
+ An instance of InputsBeamAxialStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamAxialStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamAxialStress
+ outputs:
+ An instance of OutputsBeamAxialStress.
"""
return super().outputs
@@ -281,28 +254,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -315,14 +275,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -335,15 +296,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,15 +317,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -377,16 +338,15 @@ def data_sources(self):
return self._data_sources
@property
- def integration_point(self):
- """Allows to connect integration_point input to the operator.
+ def integration_point(self) -> Input:
+ r"""Allows to connect integration_point input to the operator.
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
+ integration point where the result will be read from. Default value: 0 (first integration point).
- Parameters
- ----------
- my_integration_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -399,16 +359,15 @@ def integration_point(self):
return self._integration_point
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -439,18 +398,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_axial_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
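
As documented above, time_scoping accepts time/freq set ids as plain integers and unit_system accepts a unit-system id, a semicolon-separated string of base units, or a UnitSystem instance. A sketch selecting two result sets for beam_axial_stress, with a placeholder path and placeholder set ids:

    from ansys.dpf import core as dpf

    data_sources = dpf.DataSources(r"path/to/d3plot")

    op = dpf.operators.result.beam_axial_stress()
    op.inputs.data_sources.connect(data_sources)

    # Set ids 1 and 2 are placeholders; a Scoping or a Field of time values
    # can be connected to the same pin instead.
    op.inputs.time_scoping.connect([1, 2])

    fields = op.outputs.fields_container()
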
diff --git a/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py b/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py
index b4ff5ce316b..abc32505c1c 100644
--- a/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py
+++ b/src/ansys/dpf/core/operators/result/beam_axial_total_strain.py
@@ -4,55 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_axial_total_strain(Operator):
- """Read Beam Axial Total strain (LSDyna) by calling the readers defined
- by the datasources.
+ r"""Read Beam Axial Total strain (LSDyna) by calling the readers defined by
+ the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- integration_point : int, optional
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ integration_point: int, optional
+ integration point where the result will be read from. Default value: 0 (first integration point).
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -117,9 +102,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam Axial Total strain (LSDyna) by calling the readers defined
- by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam Axial Total strain (LSDyna) by calling the readers defined by
+the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,49 +120,31 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="integration_point",
type_names=["int32"],
optional=True,
- document="""Integration point where the result will be
- read from. default value: 0 (first
- integration point).""",
+ document=r"""integration point where the result will be read from. Default value: 0 (first integration point).""",
),
50: PinSpecification(
name="unit_system",
@@ -186,9 +154,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -196,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -212,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_EL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamAxialTotalStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamAxialTotalStrain
+ inputs:
+ An instance of InputsBeamAxialTotalStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamAxialTotalStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamAxialTotalStrain
+ outputs:
+ An instance of OutputsBeamAxialTotalStrain.
"""
return super().outputs
@@ -289,28 +262,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -323,14 +283,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,15 +304,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -364,15 +325,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,16 +346,15 @@ def data_sources(self):
return self._data_sources
@property
- def integration_point(self):
- """Allows to connect integration_point input to the operator.
+ def integration_point(self) -> Input:
+ r"""Allows to connect integration_point input to the operator.
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
+ integration point where the result will be read from. Default value: 0 (first integration point).
- Parameters
- ----------
- my_integration_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -407,16 +367,15 @@ def integration_point(self):
return self._integration_point
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,18 +408,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_axial_total_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
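
The mesh_scoping pin restricts the evaluation to a set of elements. A sketch for beam_axial_total_strain that builds an elemental Scoping explicitly, with placeholder element ids and result path:

    from ansys.dpf import core as dpf

    data_sources = dpf.DataSources(r"path/to/d3plot")

    # Element ids are placeholders; the location marks this as an elemental scoping.
    scoping = dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.elemental)

    op = dpf.operators.result.beam_axial_total_strain()
    op.inputs.data_sources.connect(data_sources)
    op.inputs.mesh_scoping.connect(scoping)

    fields = op.outputs.fields_container()
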
diff --git a/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py b/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py
index b9b008e257e..e55003dc7d8 100644
--- a/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py
+++ b/src/ansys/dpf/core/operators/result/beam_rs_shear_stress.py
@@ -4,55 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_rs_shear_stress(Operator):
- """Read Beam RS Shear Stress (LSDyna) by calling the readers defined by
- the datasources.
+ r"""Read Beam RS Shear Stress (LSDyna) by calling the readers defined by the
+ datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- integration_point : int, optional
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ integration_point: int, optional
+ integration point where the result will be read from. Default value: 0 (first integration point).
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -117,9 +102,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam RS Shear Stress (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam RS Shear Stress (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,49 +120,31 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="integration_point",
type_names=["int32"],
optional=True,
- document="""Integration point where the result will be
- read from. default value: 0 (first
- integration point).""",
+ document=r"""integration point where the result will be read from. Default value: 0 (first integration point).""",
),
50: PinSpecification(
name="unit_system",
@@ -186,9 +154,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -196,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -212,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_ST1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamRsShearStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamRsShearStress
+ inputs:
+ An instance of InputsBeamRsShearStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamRsShearStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamRsShearStress
+ outputs:
+ An instance of OutputsBeamRsShearStress.
"""
return super().outputs
@@ -283,28 +256,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,14 +277,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -337,15 +298,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -358,15 +319,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,16 +340,15 @@ def data_sources(self):
return self._data_sources
@property
- def integration_point(self):
- """Allows to connect integration_point input to the operator.
+ def integration_point(self) -> Input:
+ r"""Allows to connect integration_point input to the operator.
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
+ integration point where the result will be read from. Default value: 0 (first integration point).
- Parameters
- ----------
- my_integration_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -401,16 +361,15 @@ def integration_point(self):
return self._integration_point
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -443,18 +402,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_rs_shear_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
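
default_config now advertises a Config return type. A sketch that retrieves it and hands it back to beam_rs_shear_stress, assuming the constructor accepts a config keyword (as the other generated operators do) and the usual placeholder result file:

    from ansys.dpf import core as dpf

    data_sources = dpf.DataSources(r"path/to/d3plot")

    # Start from the operator's default configuration; individual options can
    # be changed on this Config object before the operator is created.
    cfg = dpf.operators.result.beam_rs_shear_stress.default_config()

    op = dpf.operators.result.beam_rs_shear_stress(config=cfg)
    op.inputs.data_sources.connect(data_sources)

    fields = op.outputs.fields_container()
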
diff --git a/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py b/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py
index d36852ee68e..27e891912c8 100644
--- a/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py
+++ b/src/ansys/dpf/core/operators/result/beam_s_bending_moment.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_s_bending_moment(Operator):
- """Read Beam S Bending Moment (LSDyna) by calling the readers defined by
+ r"""Read Beam S Bending Moment (LSDyna) by calling the readers defined by
the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam S Bending Moment (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam S Bending Moment (LSDyna) by calling the readers defined by
+the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_M1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamSBendingMoment:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamSBendingMoment
+ inputs:
+ An instance of InputsBeamSBendingMoment.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamSBendingMoment:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamSBendingMoment
+ outputs:
+ An instance of OutputsBeamSBendingMoment.
"""
return super().outputs
@@ -265,28 +242,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,14 +263,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,15 +284,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,15 +305,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -361,16 +326,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,18 +367,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_s_bending_moment()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
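
A minimal usage sketch of the workflow these generated docstrings describe for beam_s_bending_moment, assuming a hypothetical LSDyna d3plot path; only pins documented above are connected.

>>> from ansys.dpf import core as dpf
>>> # Hypothetical result file path; data_sources is the only mandatory pin.
>>> ds = dpf.DataSources(r"path/to/d3plot")
>>> op = dpf.operators.result.beam_s_bending_moment(data_sources=ds)
>>> # Evaluating the output triggers the read through the readers defined by the datasources.
>>> bending_moments = op.outputs.fields_container()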
diff --git a/src/ansys/dpf/core/operators/result/beam_s_shear_force.py b/src/ansys/dpf/core/operators/result/beam_s_shear_force.py
index e8c6ef7c615..0e7cc1b0c69 100644
--- a/src/ansys/dpf/core/operators/result/beam_s_shear_force.py
+++ b/src/ansys/dpf/core/operators/result/beam_s_shear_force.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_s_shear_force(Operator):
- """Read Beam S Shear Force (LSDyna) by calling the readers defined by the
+ r"""Read Beam S Shear Force (LSDyna) by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam S Shear Force (LSDyna) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam S Shear Force (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_T1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamSShearForce:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamSShearForce
+ inputs:
+ An instance of InputsBeamSShearForce.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamSShearForce:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamSShearForce
+ outputs:
+ An instance of OutputsBeamSShearForce.
"""
return super().outputs
@@ -257,28 +234,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -291,14 +255,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -311,15 +276,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -332,15 +297,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -353,16 +318,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -393,18 +357,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_s_shear_force()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
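
A sketch of restricting beam_s_shear_force to a subset of elements through the optional mesh_scoping pin described above; the element ids and file path are placeholders.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_s_shear_force()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/d3plot"))  # placeholder path
>>> # Elemental scoping listing the beam elements to read (ids are illustrative).
>>> elem_scoping = dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.elemental)
>>> op.inputs.mesh_scoping.connect(elem_scoping)
>>> shear_forces = op.outputs.fields_container()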
diff --git a/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py b/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py
index 0301ac00cb6..88e29f4e389 100644
--- a/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py
+++ b/src/ansys/dpf/core/operators/result/beam_t_bending_moment.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_t_bending_moment(Operator):
- """Read Beam T Bending Moment (LSDyna) by calling the readers defined by
+ r"""Read Beam T Bending Moment (LSDyna) by calling the readers defined by
the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam T Bending Moment (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam T Bending Moment (LSDyna) by calling the readers defined by
+the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_M2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamTBendingMoment:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamTBendingMoment
+ inputs:
+ An instance of InputsBeamTBendingMoment.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamTBendingMoment:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamTBendingMoment
+ outputs:
+ An instance of OutputsBeamTBendingMoment.
"""
return super().outputs
@@ -265,28 +242,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,14 +263,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,15 +284,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,15 +305,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -361,16 +326,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,18 +367,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_t_bending_moment()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
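
A sketch of the time_scoping behaviour documented above for beam_t_bending_moment: requesting specific load steps with a scoping located on "TimeFreq_steps". The step ids and path are illustrative.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_t_bending_moment()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/d3plot"))  # placeholder path
>>> # A scoping on "TimeFreq_steps" selects load steps rather than individual time/freq sets.
>>> step_scoping = dpf.Scoping(ids=[1, 2], location="TimeFreq_steps")
>>> op.inputs.time_scoping.connect(step_scoping)
>>> bending_moments = op.outputs.fields_container()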
diff --git a/src/ansys/dpf/core/operators/result/beam_t_shear_force.py b/src/ansys/dpf/core/operators/result/beam_t_shear_force.py
index 36a54274d64..285d8df3e53 100644
--- a/src/ansys/dpf/core/operators/result/beam_t_shear_force.py
+++ b/src/ansys/dpf/core/operators/result/beam_t_shear_force.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_t_shear_force(Operator):
- """Read Beam T Shear Force (LSDyna) by calling the readers defined by the
+ r"""Read Beam T Shear Force (LSDyna) by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam T Shear Force (LSDyna) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam T Shear Force (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_T2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamTShearForce:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamTShearForce
+ inputs:
+ An instance of InputsBeamTShearForce.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamTShearForce:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamTShearForce
+ outputs:
+ An instance of OutputsBeamTShearForce.
"""
return super().outputs
@@ -257,28 +234,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -291,14 +255,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -311,15 +276,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -332,15 +297,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -353,16 +318,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -393,18 +357,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_t_shear_force()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/beam_torsional_moment.py b/src/ansys/dpf/core/operators/result/beam_torsional_moment.py
index 0dfcc47775e..5b23d72534d 100644
--- a/src/ansys/dpf/core/operators/result/beam_torsional_moment.py
+++ b/src/ansys/dpf/core/operators/result/beam_torsional_moment.py
@@ -4,51 +4,38 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_torsional_moment(Operator):
- """Read Beam Torsional Moment (LSDyna) by calling the readers defined by
+ r"""Read Beam Torsional Moment (LSDyna) by calling the readers defined by
the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -107,9 +94,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam Torsional Moment (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam Torsional Moment (LSDyna) by calling the readers defined by
+the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -124,41 +112,25 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
50: PinSpecification(
name="unit_system",
@@ -168,9 +140,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -178,14 +148,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -194,29 +164,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_MT", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamTorsionalMoment:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamTorsionalMoment
+ inputs:
+ An instance of InputsBeamTorsionalMoment.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamTorsionalMoment:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamTorsionalMoment
+ outputs:
+ An instance of OutputsBeamTorsionalMoment.
"""
return super().outputs
@@ -265,28 +242,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,14 +263,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,15 +284,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -340,15 +305,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -361,16 +326,15 @@ def data_sources(self):
return self._data_sources
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,18 +367,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_torsional_moment()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
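
A sketch of the unit_system pin documented above for beam_torsional_moment; the pin accepts a unit-system ID (int), a semicolon-separated base-unit string, or a UnitSystem instance. The ID and path below are illustrative only.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_torsional_moment()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/d3plot"))  # placeholder path
>>> # Illustrative unit-system ID; a UnitSystem instance or base-unit string works as well.
>>> op.inputs.unit_system.connect(0)
>>> torsional_moments = op.outputs.fields_container()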
diff --git a/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py b/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py
index aa6e66c17ab..6c10a283239 100644
--- a/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py
+++ b/src/ansys/dpf/core/operators/result/beam_tr_shear_stress.py
@@ -4,55 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class beam_tr_shear_stress(Operator):
- """Read Beam TR Shear Stress (LSDyna) by calling the readers defined by
- the datasources.
+ r"""Read Beam TR Shear Stress (LSDyna) by calling the readers defined by the
+ datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Elements scoping required in output.
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- integration_point : int, optional
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
- unit_system : int or str or UnitSystem, optional
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ elements scoping required in output.
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ integration_point: int, optional
+ integration point where the result will be read from. Default value: 0 (first integration point).
+ unit_system: int or str or UnitSystem, optional
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -117,9 +102,10 @@ def __init__(
self.inputs.unit_system.connect(unit_system)
@staticmethod
- def _spec():
- description = """Read Beam TR Shear Stress (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Beam TR Shear Stress (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,49 +120,31 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Elements scoping required in output.""",
+ document=r"""elements scoping required in output.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="integration_point",
type_names=["int32"],
optional=True,
- document="""Integration point where the result will be
- read from. default value: 0 (first
- integration point).""",
+ document=r"""integration point where the result will be read from. Default value: 0 (first integration point).""",
),
50: PinSpecification(
name="unit_system",
@@ -186,9 +154,7 @@ def _spec():
"class dataProcessing::unit::CUnitSystem",
],
optional=True,
- document="""Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance""",
+ document=r"""Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance""",
),
},
map_output_pin_spec={
@@ -196,14 +162,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -212,29 +178,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="B_ST2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsBeamTrShearStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsBeamTrShearStress
+ inputs:
+ An instance of InputsBeamTrShearStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsBeamTrShearStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsBeamTrShearStress
+ outputs:
+ An instance of OutputsBeamTrShearStress.
"""
return super().outputs
@@ -283,28 +256,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._unit_system)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,14 +277,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Elements scoping required in output.
+ elements scoping required in output.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -337,15 +298,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -358,15 +319,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,16 +340,15 @@ def data_sources(self):
return self._data_sources
@property
- def integration_point(self):
- """Allows to connect integration_point input to the operator.
+ def integration_point(self) -> Input:
+ r"""Allows to connect integration_point input to the operator.
- Integration point where the result will be
- read from. default value: 0 (first
- integration point).
+ integration point where the result will be read from. Default value: 0 (first integration point).
- Parameters
- ----------
- my_integration_point : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -401,16 +361,15 @@ def integration_point(self):
return self._integration_point
@property
- def unit_system(self):
- """Allows to connect unit_system input to the operator.
+ def unit_system(self) -> Input:
+ r"""Allows to connect unit_system input to the operator.
- Unit system id (int), semicolon-separated
- list of base unit strings (str) or
- unitsystem instance
+ Unit System ID (int), semicolon-separated list of base unit strings (str) or UnitSystem instance
- Parameters
- ----------
- my_unit_system : int or str or UnitSystem
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -443,18 +402,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.beam_tr_shear_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
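
A minimal usage sketch for the beam_tr_shear_stress operator documented above, following the doctest pattern already used in these docstrings; the result file path is hypothetical and the pin names are the ones listed in the specification:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/model.d3plot")  # hypothetical LS-DYNA result file
>>> op = dpf.operators.result.beam_tr_shear_stress()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.integration_point.connect(0)  # optional pin 6, defaults to the first integration point
>>> fields = op.outputs.fields_container()
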
diff --git a/src/ansys/dpf/core/operators/result/cgns_result_provider.py b/src/ansys/dpf/core/operators/result/cgns_result_provider.py
index 92ce7f9468e..8775be6745c 100644
--- a/src/ansys/dpf/core/operators/result/cgns_result_provider.py
+++ b/src/ansys/dpf/core/operators/result/cgns_result_provider.py
@@ -4,46 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cgns_result_provider(Operator):
- """Read/compute names result from result streams.
+    r"""Read/compute named results from result streams.
+
Parameters
----------
- time_scoping : Scoping, optional
- Time/freq (use doubles or field), time/freq
- set ids (use ints or scoping) or
- time/freq step ids (use scoping with
- timefreq_steps location) required in
- output
- mesh_scoping : Scoping or ScopingsContainer, optional
- Nodes or elements scoping required in output.
- the scoping's location indicates
- whether nodes or elements are asked.
- using scopings container enables to
- split the result fields container in
- domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- result_name : str, optional
- Name of the result to read. by default the
- name of the operator is taken.
- region_scoping : Scoping or int, optional
- Optional zone name/id of the mesh.
+ time_scoping: Scoping, optional
+ time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output
+ mesh_scoping: Scoping or ScopingsContainer, optional
+        nodes or elements scoping required in output. The scoping's location indicates whether nodes or elements are asked for. Using scopings container enables splitting the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ result_name: str, optional
+ name of the result to read. By default the name of the operator is taken.
+ region_scoping: Scoping or int, optional
+ Optional zone name/Id of the mesh.
Returns
-------
- fields : FieldsContainer
+ fields: FieldsContainer
Results
Examples
@@ -111,8 +104,9 @@ def __init__(
self.inputs.region_scoping.connect(region_scoping)
@staticmethod
- def _spec():
- description = """Read/compute names result from result streams."""
+ def _spec() -> Specification:
+        description = r"""Read/compute named results from result streams.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -120,49 +114,37 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""Time/freq (use doubles or field), time/freq
- set ids (use ints or scoping) or
- time/freq step ids (use scoping with
- timefreq_steps location) required in
- output""",
+ document=r"""time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scoping", "scopings_container"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the scoping's location indicates
- whether nodes or elements are asked.
- using scopings container enables to
- split the result fields container in
- domains""",
+                    document=r"""nodes or elements scoping required in output. The scoping's location indicates whether nodes or elements are asked for. Using scopings container enables splitting the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
17: PinSpecification(
name="result_name",
type_names=["string"],
optional=True,
- document="""Name of the result to read. by default the
- name of the operator is taken.""",
+ document=r"""name of the result to read. By default the name of the operator is taken.""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "vector", "int32"],
optional=True,
- document="""Optional zone name/id of the mesh.""",
+ document=r"""Optional zone name/Id of the mesh.""",
),
},
map_output_pin_spec={
@@ -170,14 +152,14 @@ def _spec():
name="fields",
type_names=["fields_container"],
optional=False,
- document="""Results""",
+ document=r"""Results""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -186,31 +168,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="cgns::cgns::result_provider", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsCgnsResultProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCgnsResultProvider
+ inputs:
+ An instance of InputsCgnsResultProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCgnsResultProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCgnsResultProvider
+ outputs:
+ An instance of OutputsCgnsResultProvider.
"""
return super().outputs
@@ -259,18 +248,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._region_scoping)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Time/freq (use doubles or field), time/freq
- set ids (use ints or scoping) or
- time/freq step ids (use scoping with
- timefreq_steps location) required in
- output
+ time/freq (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -283,19 +269,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Nodes or elements scoping required in output.
- the scoping's location indicates
- whether nodes or elements are asked.
- using scopings container enables to
- split the result fields container in
- domains
+        nodes or elements scoping required in output. The scoping's location indicates whether nodes or elements are asked for. Using scopings container enables splitting the result fields container into domains
- Parameters
- ----------
- my_mesh_scoping : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -308,15 +290,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -329,15 +311,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,15 +332,15 @@ def data_sources(self):
return self._data_sources
@property
- def result_name(self):
- """Allows to connect result_name input to the operator.
+ def result_name(self) -> Input:
+ r"""Allows to connect result_name input to the operator.
- Name of the result to read. by default the
- name of the operator is taken.
+ name of the result to read. By default the name of the operator is taken.
- Parameters
- ----------
- my_result_name : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -371,14 +353,15 @@ def result_name(self):
return self._result_name
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Optional zone name/id of the mesh.
+ Optional zone name/Id of the mesh.
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -409,18 +392,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields)
@property
- def fields(self):
- """Allows to get fields output of the operator
+ def fields(self) -> Output:
+ r"""Allows to get fields output of the operator
+
+ Results
Returns
- ----------
- my_fields : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cgns_result_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields = op.outputs.fields()
- """ # noqa: E501
+ """
return self._fields
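
A usage sketch for the cgns_result_provider operator above; the CGNS file path and the result name are hypothetical:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/flow.cgns")  # hypothetical CGNS file
>>> op = dpf.operators.result.cgns_result_provider()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.result_name.connect("Pressure")  # pin 17; when omitted, the operator name is used
>>> fields = op.outputs.fields()
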
diff --git a/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py b/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py
index 0d5d43c07a1..6887ca36151 100644
--- a/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py
+++ b/src/ansys/dpf/core/operators/result/cms_dst_table_provider.py
@@ -4,26 +4,30 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cms_dst_table_provider(Operator):
- """Read CST table from a subfile.
+ r"""Read CST table from a subfile.
+
Parameters
----------
- data_sources : DataSources
- Data_sources (must contain at least one
- subfile).
+ data_sources: DataSources
+ Data_sources (must contain at least one subfile).
Returns
-------
- dst_table : PropertyField
- Returns integer values of the dst table
+ dst_table: PropertyField
+ returns integer values of the dst table
Examples
--------
@@ -53,8 +57,9 @@ def __init__(self, data_sources=None, config=None, server=None):
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Read CST table from a subfile."""
+ def _spec() -> Specification:
+ description = r"""Read CST table from a subfile.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -62,8 +67,7 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain at least one
- subfile).""",
+ document=r"""Data_sources (must contain at least one subfile).""",
),
},
map_output_pin_spec={
@@ -71,14 +75,14 @@ def _spec():
name="dst_table",
type_names=["property_field"],
optional=False,
- document="""Returns integer values of the dst table""",
+ document=r"""returns integer values of the dst table""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -87,29 +91,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cms_dst_table_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCmsDstTableProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCmsDstTableProvider
+ inputs:
+ An instance of InputsCmsDstTableProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCmsDstTableProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCmsDstTableProvider
+ outputs:
+ An instance of OutputsCmsDstTableProvider.
"""
return super().outputs
@@ -134,15 +145,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data_sources (must contain at least one
- subfile).
+ Data_sources (must contain at least one subfile).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -173,18 +184,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._dst_table)
@property
- def dst_table(self):
- """Allows to get dst_table output of the operator
+ def dst_table(self) -> Output:
+ r"""Allows to get dst_table output of the operator
+
+ returns integer values of the dst table
Returns
- ----------
- my_dst_table : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cms_dst_table_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dst_table = op.outputs.dst_table()
- """ # noqa: E501
+ """
return self._dst_table
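
A usage sketch for cms_dst_table_provider; the subfile path is hypothetical:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/file.sub")  # hypothetical CMS subfile
>>> op = dpf.operators.result.cms_dst_table_provider()
>>> op.inputs.data_sources.connect(ds)
>>> dst = op.outputs.dst_table()  # PropertyField holding the integer DST table values
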
diff --git a/src/ansys/dpf/core/operators/result/cms_matrices_provider.py b/src/ansys/dpf/core/operators/result/cms_matrices_provider.py
index 850fc47cdf1..595194ffaec 100644
--- a/src/ansys/dpf/core/operators/result/cms_matrices_provider.py
+++ b/src/ansys/dpf/core/operators/result/cms_matrices_provider.py
@@ -4,33 +4,33 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cms_matrices_provider(Operator):
- """Read reduced matrices for cms elements. Extract stiffness, damping,
- mass matrices and load vector from a subfile.
+ r"""Read reduced matrices for cms elements. Extract stiffness, damping, mass
+ matrices and load vector from a subfile.
+
Parameters
----------
- data_sources : DataSources
- Data_sources (must contain at list one
- subfile).
- matrix_form : bool
- If this pin i set to true, data are return as
- matrix form.
+ data_sources: DataSources
+        Data_sources (must contain at least one subfile).
+ matrix_form: bool
+        If this pin is set to true, data are returned in matrix form.
Returns
-------
- fields_container : FieldsContainer
- Fields container containing in this order :
- stiffness, damping, mass matrices,
- and then load vector. but if pin 200
- is set to true, it's in matrix form.
+ fields_container: FieldsContainer
+ Fields container containing in this order : stiffness, damping, mass matrices, and then load vector. But if pin 200 is set to true, it's in matrix form.
Examples
--------
@@ -65,9 +65,10 @@ def __init__(self, data_sources=None, matrix_form=None, config=None, server=None
self.inputs.matrix_form.connect(matrix_form)
@staticmethod
- def _spec():
- description = """Read reduced matrices for cms elements. Extract stiffness, damping,
- mass matrices and load vector from a subfile."""
+ def _spec() -> Specification:
+ description = r"""Read reduced matrices for cms elements. Extract stiffness, damping, mass
+matrices and load vector from a subfile.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -75,15 +76,13 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain at list one
- subfile).""",
+                    document=r"""Data_sources (must contain at least one subfile).""",
),
200: PinSpecification(
name="matrix_form",
type_names=["bool"],
optional=False,
- document="""If this pin i set to true, data are return as
- matrix form.""",
+                    document=r"""If this pin is set to true, data are returned in matrix form.""",
),
},
map_output_pin_spec={
@@ -91,17 +90,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fields container containing in this order :
- stiffness, damping, mass matrices,
- and then load vector. but if pin 200
- is set to true, it's in matrix form.""",
+ document=r"""Fields container containing in this order : stiffness, damping, mass matrices, and then load vector. But if pin 200 is set to true, it's in matrix form.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -110,29 +106,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cms_matrices_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCmsMatricesProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCmsMatricesProvider
+ inputs:
+ An instance of InputsCmsMatricesProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCmsMatricesProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCmsMatricesProvider
+ outputs:
+ An instance of OutputsCmsMatricesProvider.
"""
return super().outputs
@@ -163,15 +166,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._matrix_form)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data_sources (must contain at list one
- subfile).
+        Data_sources (must contain at least one subfile).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -184,15 +187,15 @@ def data_sources(self):
return self._data_sources
@property
- def matrix_form(self):
- """Allows to connect matrix_form input to the operator.
+ def matrix_form(self) -> Input:
+ r"""Allows to connect matrix_form input to the operator.
- If this pin i set to true, data are return as
- matrix form.
+        If this pin is set to true, data are returned in matrix form.
- Parameters
- ----------
- my_matrix_form : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -225,18 +228,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ Fields container containing in this order : stiffness, damping, mass matrices, and then load vector. But if pin 200 is set to true, it's in matrix form.
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cms_matrices_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
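
A usage sketch for cms_matrices_provider; the subfile path is hypothetical:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/file.sub")  # hypothetical CMS subfile
>>> op = dpf.operators.result.cms_matrices_provider()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.matrix_form.connect(True)  # pin 200: return the data in matrix form
>>> fc = op.outputs.fields_container()  # stiffness, damping, mass matrices, then load vector
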
diff --git a/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py b/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py
index 6cbd8d7aafb..737ef129904 100644
--- a/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py
+++ b/src/ansys/dpf/core/operators/result/cms_subfile_info_provider.py
@@ -4,42 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cms_subfile_info_provider(Operator):
- """Read required information from a subfile.
+ r"""Read required information from a subfile.
+
Parameters
----------
- data_sources : DataSources
- Data_sources (must contain at least one
- subfile).
- cms_subfile_data : bool
- If this pin i set to true, data are return in
- a field.
- output_maxdof_on_masternodes : bool, optional
- If this pin is set to true, compute and add
- field with max degrees of freedom on
- master nodes
+ data_sources: DataSources
+ Data_sources (must contain at least one subfile).
+ cms_subfile_data: bool
+        If this pin is set to true, data are returned in a field.
+ output_maxdof_on_masternodes: bool, optional
+ If this pin is set to true, compute and add field with max degrees of freedom on master nodes
Returns
-------
- int32 : int
- Returns integer values in the order : unit
- system used, stiffness matrix present
- key, damping matrix present key, mass
- matrix present key, number of master
- nodes, number of virtual nodes
- field : PropertyField
- Returns integer values in the order : number
- of load vectors (nvects), number of
- nodes (nnod), number of virtual nodes
- (nvnodes), number of modes (nvmodes)
+ int32: int
+ returns integer values in the order : unit system used, stiffness matrix present key, damping matrix present key, mass matrix present key, number of master nodes, number of virtual nodes
+ field: PropertyField
+ returns integer values in the order : number of load vectors (nvects), number of nodes (nnod), number of virtual nodes (nvnodes), number of modes (nvmodes)
Examples
--------
@@ -89,8 +83,9 @@ def __init__(
)
@staticmethod
- def _spec():
- description = """Read required information from a subfile."""
+ def _spec() -> Specification:
+ description = r"""Read required information from a subfile.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -98,23 +93,19 @@ def _spec():
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data_sources (must contain at least one
- subfile).""",
+ document=r"""Data_sources (must contain at least one subfile).""",
),
200: PinSpecification(
name="cms_subfile_data",
type_names=["bool"],
optional=False,
- document="""If this pin i set to true, data are return in
- a field.""",
+                    document=r"""If this pin is set to true, data are returned in a field.""",
),
300: PinSpecification(
name="output_maxdof_on_masternodes",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, compute and add
- field with max degrees of freedom on
- master nodes""",
+ document=r"""If this pin is set to true, compute and add field with max degrees of freedom on master nodes""",
),
},
map_output_pin_spec={
@@ -122,27 +113,20 @@ def _spec():
name="int32",
type_names=["int32"],
optional=False,
- document="""Returns integer values in the order : unit
- system used, stiffness matrix present
- key, damping matrix present key, mass
- matrix present key, number of master
- nodes, number of virtual nodes""",
+ document=r"""returns integer values in the order : unit system used, stiffness matrix present key, damping matrix present key, mass matrix present key, number of master nodes, number of virtual nodes""",
),
1: PinSpecification(
name="field",
type_names=["property_field"],
optional=False,
- document="""Returns integer values in the order : number
- of load vectors (nvects), number of
- nodes (nnod), number of virtual nodes
- (nvnodes), number of modes (nvmodes)""",
+ document=r"""returns integer values in the order : number of load vectors (nvects), number of nodes (nnod), number of virtual nodes (nvnodes), number of modes (nvmodes)""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -151,29 +135,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cms_subfile_info_provider", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCmsSubfileInfoProvider:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCmsSubfileInfoProvider
+ inputs:
+ An instance of InputsCmsSubfileInfoProvider.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCmsSubfileInfoProvider:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCmsSubfileInfoProvider
+ outputs:
+ An instance of OutputsCmsSubfileInfoProvider.
"""
return super().outputs
@@ -210,15 +201,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._output_maxdof_on_masternodes)
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data_sources (must contain at least one
- subfile).
+ Data_sources (must contain at least one subfile).
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -231,15 +222,15 @@ def data_sources(self):
return self._data_sources
@property
- def cms_subfile_data(self):
- """Allows to connect cms_subfile_data input to the operator.
+ def cms_subfile_data(self) -> Input:
+ r"""Allows to connect cms_subfile_data input to the operator.
- If this pin i set to true, data are return in
- a field.
+        If this pin is set to true, data are returned in a field.
- Parameters
- ----------
- my_cms_subfile_data : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +243,15 @@ def cms_subfile_data(self):
return self._cms_subfile_data
@property
- def output_maxdof_on_masternodes(self):
- """Allows to connect output_maxdof_on_masternodes input to the operator.
+ def output_maxdof_on_masternodes(self) -> Input:
+ r"""Allows to connect output_maxdof_on_masternodes input to the operator.
- If this pin is set to true, compute and add
- field with max degrees of freedom on
- master nodes
+ If this pin is set to true, compute and add field with max degrees of freedom on master nodes
- Parameters
- ----------
- my_output_maxdof_on_masternodes : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -295,35 +285,41 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def int32(self):
- """Allows to get int32 output of the operator
+ def int32(self) -> Output:
+ r"""Allows to get int32 output of the operator
+
+ returns integer values in the order : unit system used, stiffness matrix present key, damping matrix present key, mass matrix present key, number of master nodes, number of virtual nodes
Returns
- ----------
- my_int32 : int
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cms_subfile_info_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_int32 = op.outputs.int32()
- """ # noqa: E501
+ """
return self._int32
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ returns integer values in the order : number of load vectors (nvects), number of nodes (nnod), number of virtual nodes (nvnodes), number of modes (nvmodes)
Returns
- ----------
- my_field : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cms_subfile_info_provider()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
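
A usage sketch for cms_subfile_info_provider; the subfile path is hypothetical:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/file.sub")  # hypothetical CMS subfile
>>> op = dpf.operators.result.cms_subfile_info_provider()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.cms_subfile_data.connect(True)  # pin 200: also return the data in a field
>>> keys = op.outputs.int32()   # unit system, matrix presence keys, node counts
>>> table = op.outputs.field()  # load vector, node, virtual node and mode counts
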
diff --git a/src/ansys/dpf/core/operators/result/co_energy.py b/src/ansys/dpf/core/operators/result/co_energy.py
index 7835dd91bfe..f2be6ec58ce 100644
--- a/src/ansys/dpf/core/operators/result/co_energy.py
+++ b/src/ansys/dpf/core/operators/result/co_energy.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class co_energy(Operator):
- """Read/compute co-energy (magnetics) by calling the readers defined by
- the datasources.
+ r"""Read/compute co-energy (magnetics) by calling the readers defined by the
+ datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute co-energy (magnetics) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute co-energy (magnetics) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+                    document=r"""prevents reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENG_CO", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCoEnergy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCoEnergy
+ inputs:
+ An instance of InputsCoEnergy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCoEnergy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCoEnergy
+ outputs:
+ An instance of OutputsCoEnergy.
"""
return super().outputs
@@ -309,28 +264,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,24 +285,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,15 +306,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -394,15 +327,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -415,15 +348,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -436,15 +369,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -457,15 +390,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+        prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -496,18 +429,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.co_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
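
A usage sketch for the co_energy operator; the result file path and the requested time set are hypothetical:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"/path/to/file.rst")  # hypothetical result file
>>> op = dpf.operators.result.co_energy()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.time_scoping.connect([1])  # pin 0: time/freq set ids as ints
>>> fields = op.outputs.fields_container()
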
diff --git a/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py b/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py
index ebfe639e8be..d699c4d0418 100644
--- a/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py
+++ b/src/ansys/dpf/core/operators/result/compute_invariant_terms_motion.py
@@ -4,59 +4,59 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_invariant_terms_motion(Operator):
- """Set the required data for the invariant terms computation (reduced
- matrices, lumped mass matrix, modes ...)
+ r"""Set the required data for the invariant terms computation (reduced
+ matrices, lumped mass matrix, modes …)
+
Parameters
----------
- rom_matrices : FieldsContainer
- Fieldscontainers containing the reduced
- matrices
- mode_shapes : FieldsContainer
- Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method
- lumped_mass : FieldsContainer
- Fieldscontainers containing the lumped mass
- model_data : FieldsContainer
- Data describing the finite element model
- field_coordinates : Field
- Coordinates of all nodes
- nod :
+ rom_matrices: FieldsContainer
+ FieldsContainers containing the reduced matrices
+ mode_shapes: FieldsContainer
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
+ lumped_mass: FieldsContainer
+ FieldsContainers containing the lumped mass
+ model_data: FieldsContainer
+ data describing the finite element model
+ field_coordinates: Field
+ coordinates of all nodes
+ nod:
Returns
-------
- model_data : PropertyField
- Data describing the finite element model
- mode_shapes : FieldsContainer
- Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method
- lumped_mass : FieldsContainer
- Fieldscontainers containing the lumped mass
- field_coordinates_and_euler_angles : FieldsContainer
- Coordinates and euler angles of all nodes
- nod :
- used_node_index :
- eigenvalue :
- translational_mode_shape :
- rotational_mode_shape :
- invrt_1 : float
- invrt_2 :
- invrt_3 :
- invrt_4 :
- invrt_5 :
- invrt_6 :
- invrt_7 :
- invrt_8 :
+ model_data: PropertyField
+ data describing the finite element model
+ mode_shapes: FieldsContainer
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
+ lumped_mass: FieldsContainer
+ FieldsContainers containing the lumped mass
+ field_coordinates_and_euler_angles: FieldsContainer
+ coordinates and euler angles of all nodes
+ nod:
+ used_node_index:
+ eigenvalue:
+ translational_mode_shape:
+ rotational_mode_shape:
+ invrt_1: float
+ invrt_2:
+ invrt_3:
+ invrt_4:
+ invrt_5:
+ invrt_6:
+ invrt_7:
+ invrt_8:
Examples
--------
@@ -139,9 +139,10 @@ def __init__(
self.inputs.nod.connect(nod)
@staticmethod
- def _spec():
- description = """Set the required data for the invariant terms computation (reduced
- matrices, lumped mass matrix, modes ...)"""
+ def _spec() -> Specification:
+ description = r"""Set the required data for the invariant terms computation (reduced
+matrices, lumped mass matrix, modes …)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -149,40 +150,37 @@ def _spec():
name="rom_matrices",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the reduced
- matrices""",
+ document=r"""FieldsContainers containing the reduced matrices""",
),
1: PinSpecification(
name="mode_shapes",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method""",
+ document=r"""FieldsContainers containing the mode shapes, which are CST and NOR for the cms method""",
),
2: PinSpecification(
name="lumped_mass",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the lumped mass""",
+ document=r"""FieldsContainers containing the lumped mass""",
),
3: PinSpecification(
name="model_data",
type_names=["fields_container"],
optional=False,
- document="""Data describing the finite element model""",
+ document=r"""data describing the finite element model""",
),
4: PinSpecification(
name="field_coordinates",
type_names=["field"],
optional=False,
- document="""Coordinates of all nodes""",
+ document=r"""coordinates of all nodes""",
),
5: PinSpecification(
name="nod",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -190,112 +188,110 @@ def _spec():
name="model_data",
type_names=["property_field"],
optional=False,
- document="""Data describing the finite element model""",
+ document=r"""data describing the finite element model""",
),
1: PinSpecification(
name="mode_shapes",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method""",
+ document=r"""FieldsContainers containing the mode shapes, which are CST and NOR for the cms method""",
),
2: PinSpecification(
name="lumped_mass",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the lumped mass""",
+ document=r"""FieldsContainers containing the lumped mass""",
),
3: PinSpecification(
name="field_coordinates_and_euler_angles",
type_names=["fields_container"],
optional=False,
- document="""Coordinates and euler angles of all nodes""",
+ document=r"""coordinates and euler angles of all nodes""",
),
4: PinSpecification(
name="nod",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="used_node_index",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
6: PinSpecification(
name="eigenvalue",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="translational_mode_shape",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
8: PinSpecification(
name="rotational_mode_shape",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
9: PinSpecification(
name="invrt_1",
type_names=["double"],
optional=False,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="invrt_2",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
11: PinSpecification(
name="invrt_3",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
12: PinSpecification(
name="invrt_4",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
13: PinSpecification(
name="invrt_5",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="invrt_6",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
15: PinSpecification(
name="invrt_7",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
16: PinSpecification(
name="invrt_8",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -304,31 +300,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="compute_invariant_terms_motion", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeInvariantTermsMotion:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeInvariantTermsMotion
+ inputs:
+ An instance of InputsComputeInvariantTermsMotion.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeInvariantTermsMotion:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeInvariantTermsMotion
+ outputs:
+ An instance of OutputsComputeInvariantTermsMotion.
"""
return super().outputs
@@ -383,15 +386,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._nod)
@property
- def rom_matrices(self):
- """Allows to connect rom_matrices input to the operator.
+ def rom_matrices(self) -> Input:
+ r"""Allows to connect rom_matrices input to the operator.
- Fieldscontainers containing the reduced
- matrices
+ FieldsContainers containing the reduced matrices
- Parameters
- ----------
- my_rom_matrices : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -404,16 +407,15 @@ def rom_matrices(self):
return self._rom_matrices
@property
- def mode_shapes(self):
- """Allows to connect mode_shapes input to the operator.
+ def mode_shapes(self) -> Input:
+ r"""Allows to connect mode_shapes input to the operator.
- Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
- Parameters
- ----------
- my_mode_shapes : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -426,14 +428,15 @@ def mode_shapes(self):
return self._mode_shapes
@property
- def lumped_mass(self):
- """Allows to connect lumped_mass input to the operator.
+ def lumped_mass(self) -> Input:
+ r"""Allows to connect lumped_mass input to the operator.
- Fieldscontainers containing the lumped mass
+ FieldsContainers containing the lumped mass
- Parameters
- ----------
- my_lumped_mass : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -446,14 +449,15 @@ def lumped_mass(self):
return self._lumped_mass
@property
- def model_data(self):
- """Allows to connect model_data input to the operator.
+ def model_data(self) -> Input:
+ r"""Allows to connect model_data input to the operator.
- Data describing the finite element model
+ data describing the finite element model
- Parameters
- ----------
- my_model_data : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,14 +470,15 @@ def model_data(self):
return self._model_data
@property
- def field_coordinates(self):
- """Allows to connect field_coordinates input to the operator.
+ def field_coordinates(self) -> Input:
+ r"""Allows to connect field_coordinates input to the operator.
- Coordinates of all nodes
+ coordinates of all nodes
- Parameters
- ----------
- my_field_coordinates : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,12 +491,13 @@ def field_coordinates(self):
return self._field_coordinates
@property
- def nod(self):
- """Allows to connect nod input to the operator.
+ def nod(self) -> Input:
+ r"""Allows to connect nod input to the operator.
- Parameters
- ----------
- my_nod :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -602,290 +608,315 @@ def __init__(self, op: Operator):
self._outputs.append(self._invrt_8)
@property
- def model_data(self):
- """Allows to get model_data output of the operator
+ def model_data(self) -> Output:
+ r"""Allows to get model_data output of the operator
+
+ data describing the finite element model
Returns
- ----------
- my_model_data : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_model_data = op.outputs.model_data()
- """ # noqa: E501
+ """
return self._model_data
@property
- def mode_shapes(self):
- """Allows to get mode_shapes output of the operator
+ def mode_shapes(self) -> Output:
+ r"""Allows to get mode_shapes output of the operator
+
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
Returns
- ----------
- my_mode_shapes : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mode_shapes = op.outputs.mode_shapes()
- """ # noqa: E501
+ """
return self._mode_shapes
@property
- def lumped_mass(self):
- """Allows to get lumped_mass output of the operator
+ def lumped_mass(self) -> Output:
+ r"""Allows to get lumped_mass output of the operator
+
+ FieldsContainers containing the lumped mass
Returns
- ----------
- my_lumped_mass : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_lumped_mass = op.outputs.lumped_mass()
- """ # noqa: E501
+ """
return self._lumped_mass
@property
- def field_coordinates_and_euler_angles(self):
- """Allows to get field_coordinates_and_euler_angles output of the operator
+ def field_coordinates_and_euler_angles(self) -> Output:
+ r"""Allows to get field_coordinates_and_euler_angles output of the operator
+
+ coordinates and euler angles of all nodes
Returns
- ----------
- my_field_coordinates_and_euler_angles : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_coordinates_and_euler_angles = op.outputs.field_coordinates_and_euler_angles()
- """ # noqa: E501
+ """
return self._field_coordinates_and_euler_angles
@property
- def nod(self):
- """Allows to get nod output of the operator
+ def nod(self) -> Output:
+ r"""Allows to get nod output of the operator
Returns
- ----------
- my_nod :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_nod = op.outputs.nod()
- """ # noqa: E501
+ """
return self._nod
@property
- def used_node_index(self):
- """Allows to get used_node_index output of the operator
+ def used_node_index(self) -> Output:
+ r"""Allows to get used_node_index output of the operator
Returns
- ----------
- my_used_node_index :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_used_node_index = op.outputs.used_node_index()
- """ # noqa: E501
+ """
return self._used_node_index
@property
- def eigenvalue(self):
- """Allows to get eigenvalue output of the operator
+ def eigenvalue(self) -> Output:
+ r"""Allows to get eigenvalue output of the operator
Returns
- ----------
- my_eigenvalue :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_eigenvalue = op.outputs.eigenvalue()
- """ # noqa: E501
+ """
return self._eigenvalue
@property
- def translational_mode_shape(self):
- """Allows to get translational_mode_shape output of the operator
+ def translational_mode_shape(self) -> Output:
+ r"""Allows to get translational_mode_shape output of the operator
Returns
- ----------
- my_translational_mode_shape :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_translational_mode_shape = op.outputs.translational_mode_shape()
- """ # noqa: E501
+ """
return self._translational_mode_shape
@property
- def rotational_mode_shape(self):
- """Allows to get rotational_mode_shape output of the operator
+ def rotational_mode_shape(self) -> Output:
+ r"""Allows to get rotational_mode_shape output of the operator
Returns
- ----------
- my_rotational_mode_shape :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_rotational_mode_shape = op.outputs.rotational_mode_shape()
- """ # noqa: E501
+ """
return self._rotational_mode_shape
@property
- def invrt_1(self):
- """Allows to get invrt_1 output of the operator
+ def invrt_1(self) -> Output:
+ r"""Allows to get invrt_1 output of the operator
Returns
- ----------
- my_invrt_1 : float
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_1 = op.outputs.invrt_1()
- """ # noqa: E501
+ """
return self._invrt_1
@property
- def invrt_2(self):
- """Allows to get invrt_2 output of the operator
+ def invrt_2(self) -> Output:
+ r"""Allows to get invrt_2 output of the operator
Returns
- ----------
- my_invrt_2 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_2 = op.outputs.invrt_2()
- """ # noqa: E501
+ """
return self._invrt_2
@property
- def invrt_3(self):
- """Allows to get invrt_3 output of the operator
+ def invrt_3(self) -> Output:
+ r"""Allows to get invrt_3 output of the operator
Returns
- ----------
- my_invrt_3 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_3 = op.outputs.invrt_3()
- """ # noqa: E501
+ """
return self._invrt_3
@property
- def invrt_4(self):
- """Allows to get invrt_4 output of the operator
+ def invrt_4(self) -> Output:
+ r"""Allows to get invrt_4 output of the operator
Returns
- ----------
- my_invrt_4 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_4 = op.outputs.invrt_4()
- """ # noqa: E501
+ """
return self._invrt_4
@property
- def invrt_5(self):
- """Allows to get invrt_5 output of the operator
+ def invrt_5(self) -> Output:
+ r"""Allows to get invrt_5 output of the operator
Returns
- ----------
- my_invrt_5 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_5 = op.outputs.invrt_5()
- """ # noqa: E501
+ """
return self._invrt_5
@property
- def invrt_6(self):
- """Allows to get invrt_6 output of the operator
+ def invrt_6(self) -> Output:
+ r"""Allows to get invrt_6 output of the operator
Returns
- ----------
- my_invrt_6 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_6 = op.outputs.invrt_6()
- """ # noqa: E501
+ """
return self._invrt_6
@property
- def invrt_7(self):
- """Allows to get invrt_7 output of the operator
+ def invrt_7(self) -> Output:
+ r"""Allows to get invrt_7 output of the operator
Returns
- ----------
- my_invrt_7 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_7 = op.outputs.invrt_7()
- """ # noqa: E501
+ """
return self._invrt_7
@property
- def invrt_8(self):
- """Allows to get invrt_8 output of the operator
+ def invrt_8(self) -> Output:
+ r"""Allows to get invrt_8 output of the operator
Returns
- ----------
- my_invrt_8 :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_motion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_invrt_8 = op.outputs.invrt_8()
- """ # noqa: E501
+ """
return self._invrt_8
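
A minimal usage sketch for compute_invariant_terms_motion, assuming only the pin names documented in the updated docstrings above; the my_* variables are placeholders for FieldsContainer/Field objects the caller already has and are not part of the patch.

from ansys.dpf import core as dpf

# Instantiate the operator and connect the required pins (placeholders: my_*).
op = dpf.operators.result.compute_invariant_terms_motion()
op.inputs.rom_matrices.connect(my_rom_matrices)            # FieldsContainer: reduced matrices
op.inputs.mode_shapes.connect(my_mode_shapes)              # FieldsContainer: CST and NOR mode shapes
op.inputs.lumped_mass.connect(my_lumped_mass)              # FieldsContainer: lumped mass
op.inputs.model_data.connect(my_model_data)                # FieldsContainer: finite element model data
op.inputs.field_coordinates.connect(my_field_coordinates)  # Field: coordinates of all nodes
op.inputs.nod.connect(my_nod)                              # vector pin 5

# Evaluating any output runs the operator.
model_data = op.outputs.model_data()   # PropertyField, output pin 0
invrt_1 = op.outputs.invrt_1()         # float, output pin 9
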
diff --git a/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py b/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py
index 986d037bc62..7e792ab4c14 100644
--- a/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py
+++ b/src/ansys/dpf/core/operators/result/compute_invariant_terms_rbd.py
@@ -4,71 +4,72 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_invariant_terms_rbd(Operator):
- """Set the required data for the invariant terms computation (reduced
- matrices, lumped mass matrix, modes ...)
+ r"""Set the required data for the invariant terms computation (reduced
+ matrices, lumped mass matrix, modes …)
+
Parameters
----------
- rom_matrices : FieldsContainer
- Fieldscontainers containing the reduced
- matrices
- mode_shapes : FieldsContainer
- Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method
- lumped_mass : FieldsContainer
- Fieldscontainers containing the lumped mass
- model_data : FieldsContainer
- Data describing the finite element model
- center_of_mass : FieldsContainer
- inertia_relief : FieldsContainer
- Inertia matrix
- model_size : float
- Model size
- field_coordinates : Field
- Coordinates of all nodes
- nod :
- constraint_mode_check : bool, optional
- If true, the orthogonality of the constraint
- modes are checked. default is false.
+ rom_matrices: FieldsContainer
+ FieldsContainers containing the reduced matrices
+ mode_shapes: FieldsContainer
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
+ lumped_mass: FieldsContainer
+ FieldsContainers containing the lumped mass
+ model_data: FieldsContainer
+ data describing the finite element model
+ center_of_mass: FieldsContainer
+ inertia_relief: FieldsContainer
+ inertia matrix
+ model_size: float
+ model size
+ field_coordinates: Field
+ coordinates of all nodes
+ nod:
+ constraint_mode_check: bool, optional
+ if true, the orthogonality of the constraint modes is checked. Default is false.
Returns
-------
- model_data : PropertyField
- Data describing the finite element model
- center_of_mass : Field
- Center of mass of the body
- inertia_relief : Field
- Inertia matrix
- model_size : PropertyField
- master_node_coordinates :
- v_trsf :
- Translational and rotational shape functions
- k_mat : Field
- mass_mat : Field
- c_mat : Field
- rhs : Field
- dn :
- dr_cross_n :
- drn :
- dn_cross_n :
- dnx_y :
- dny_y :
- dnz_y :
- dyx_n :
- dyy_n :
- dyz_n :
- dnxn :
- dnyn :
- dnzn :
+ model_data: PropertyField
+ data describing the finite element model
+ center_of_mass: Field
+ center of mass of the body
+ inertia_relief: Field
+ inertia matrix
+ model_size: PropertyField
+ master_node_coordinates:
+ v_trsf:
+ translational and rotational shape functions
+ k_mat: Field
+ mass_mat: Field
+ c_mat: Field
+ rhs: Field
+ dn:
+ dr_cross_n:
+ drn:
+ dn_cross_n:
+ dnx_y:
+ dny_y:
+ dnz_y:
+ dyx_n:
+ dyy_n:
+ dyz_n:
+ dnxn:
+ dnyn:
+ dnzn:
Examples
--------
@@ -181,9 +182,10 @@ def __init__(
self.inputs.constraint_mode_check.connect(constraint_mode_check)
@staticmethod
- def _spec():
- description = """Set the required data for the invariant terms computation (reduced
- matrices, lumped mass matrix, modes ...)"""
+ def _spec() -> Specification:
+ description = r"""Set the required data for the invariant terms computation (reduced
+matrices, lumped mass matrix, modes …)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -191,65 +193,61 @@ def _spec():
name="rom_matrices",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the reduced
- matrices""",
+ document=r"""FieldsContainers containing the reduced matrices""",
),
1: PinSpecification(
name="mode_shapes",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method""",
+ document=r"""FieldsContainers containing the mode shapes, which are CST and NOR for the cms method""",
),
2: PinSpecification(
name="lumped_mass",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainers containing the lumped mass""",
+ document=r"""FieldsContainers containing the lumped mass""",
),
3: PinSpecification(
name="model_data",
type_names=["fields_container"],
optional=False,
- document="""Data describing the finite element model""",
+ document=r"""data describing the finite element model""",
),
4: PinSpecification(
name="center_of_mass",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="inertia_relief",
type_names=["fields_container"],
optional=False,
- document="""Inertia matrix""",
+ document=r"""inertia matrix""",
),
6: PinSpecification(
name="model_size",
type_names=["double"],
optional=False,
- document="""Model size""",
+ document=r"""model size""",
),
7: PinSpecification(
name="field_coordinates",
type_names=["field"],
optional=False,
- document="""Coordinates of all nodes""",
+ document=r"""coordinates of all nodes""",
),
8: PinSpecification(
name="nod",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
9: PinSpecification(
name="constraint_mode_check",
type_names=["bool"],
optional=True,
- document="""If true, the orthogonality of the constraint
- modes are checked. default is false.""",
+ document=r"""if true, the orthogonality of the constraint modes are checked. Default is false.""",
),
},
map_output_pin_spec={
@@ -257,146 +255,146 @@ def _spec():
name="model_data",
type_names=["property_field"],
optional=False,
- document="""Data describing the finite element model""",
+ document=r"""data describing the finite element model""",
),
1: PinSpecification(
name="center_of_mass",
type_names=["field"],
optional=False,
- document="""Center of mass of the body""",
+ document=r"""center of mass of the body""",
),
2: PinSpecification(
name="inertia_relief",
type_names=["field"],
optional=False,
- document="""Inertia matrix""",
+ document=r"""inertia matrix""",
),
3: PinSpecification(
name="model_size",
type_names=["property_field"],
optional=False,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="master_node_coordinates",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="v_trsf",
type_names=["vector"],
optional=False,
- document="""Translational and rotational shape functions""",
+ document=r"""translational and rotational shape functions""",
),
6: PinSpecification(
name="k_mat",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
7: PinSpecification(
name="mass_mat",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
8: PinSpecification(
name="c_mat",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
9: PinSpecification(
name="rhs",
type_names=["field"],
optional=False,
- document="""""",
+ document=r"""""",
),
10: PinSpecification(
name="dn",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
11: PinSpecification(
name="dr_cross_n",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
12: PinSpecification(
name="drn",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
13: PinSpecification(
name="dn_cross_n",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="dnx_y",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
15: PinSpecification(
name="dny_y",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
16: PinSpecification(
name="dnz_y",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
17: PinSpecification(
name="dyx_n",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="dyy_n",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
19: PinSpecification(
name="dyz_n",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
20: PinSpecification(
name="dnxn",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
21: PinSpecification(
name="dnyn",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
22: PinSpecification(
name="dnzn",
type_names=["vector"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -405,31 +403,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="compute_invariant_terms_rbd", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeInvariantTermsRbd:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeInvariantTermsRbd
+ inputs:
+ An instance of InputsComputeInvariantTermsRbd.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeInvariantTermsRbd:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeInvariantTermsRbd
+ outputs:
+ An instance of OutputsComputeInvariantTermsRbd.
"""
return super().outputs
@@ -506,15 +511,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._constraint_mode_check)
@property
- def rom_matrices(self):
- """Allows to connect rom_matrices input to the operator.
+ def rom_matrices(self) -> Input:
+ r"""Allows to connect rom_matrices input to the operator.
- Fieldscontainers containing the reduced
- matrices
+ FieldsContainers containing the reduced matrices
- Parameters
- ----------
- my_rom_matrices : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -527,16 +532,15 @@ def rom_matrices(self):
return self._rom_matrices
@property
- def mode_shapes(self):
- """Allows to connect mode_shapes input to the operator.
+ def mode_shapes(self) -> Input:
+ r"""Allows to connect mode_shapes input to the operator.
- Fieldscontainers containing the mode shapes,
- which are cst and nor for the cms
- method
+ FieldsContainers containing the mode shapes, which are CST and NOR for the cms method
- Parameters
- ----------
- my_mode_shapes : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,14 +553,15 @@ def mode_shapes(self):
return self._mode_shapes
@property
- def lumped_mass(self):
- """Allows to connect lumped_mass input to the operator.
+ def lumped_mass(self) -> Input:
+ r"""Allows to connect lumped_mass input to the operator.
- Fieldscontainers containing the lumped mass
+ FieldsContainers containing the lumped mass
- Parameters
- ----------
- my_lumped_mass : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -569,14 +574,15 @@ def lumped_mass(self):
return self._lumped_mass
@property
- def model_data(self):
- """Allows to connect model_data input to the operator.
+ def model_data(self) -> Input:
+ r"""Allows to connect model_data input to the operator.
- Data describing the finite element model
+ data describing the finite element model
- Parameters
- ----------
- my_model_data : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -589,12 +595,13 @@ def model_data(self):
return self._model_data
@property
- def center_of_mass(self):
- """Allows to connect center_of_mass input to the operator.
+ def center_of_mass(self) -> Input:
+ r"""Allows to connect center_of_mass input to the operator.
- Parameters
- ----------
- my_center_of_mass : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -607,14 +614,15 @@ def center_of_mass(self):
return self._center_of_mass
@property
- def inertia_relief(self):
- """Allows to connect inertia_relief input to the operator.
+ def inertia_relief(self) -> Input:
+ r"""Allows to connect inertia_relief input to the operator.
- Inertia matrix
+ inertia matrix
- Parameters
- ----------
- my_inertia_relief : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -627,14 +635,15 @@ def inertia_relief(self):
return self._inertia_relief
@property
- def model_size(self):
- """Allows to connect model_size input to the operator.
+ def model_size(self) -> Input:
+ r"""Allows to connect model_size input to the operator.
- Model size
+ model size
- Parameters
- ----------
- my_model_size : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -647,14 +656,15 @@ def model_size(self):
return self._model_size
@property
- def field_coordinates(self):
- """Allows to connect field_coordinates input to the operator.
+ def field_coordinates(self) -> Input:
+ r"""Allows to connect field_coordinates input to the operator.
- Coordinates of all nodes
+ coordinates of all nodes
- Parameters
- ----------
- my_field_coordinates : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -667,12 +677,13 @@ def field_coordinates(self):
return self._field_coordinates
@property
- def nod(self):
- """Allows to connect nod input to the operator.
+ def nod(self) -> Input:
+ r"""Allows to connect nod input to the operator.
- Parameters
- ----------
- my_nod :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -685,15 +696,15 @@ def nod(self):
return self._nod
@property
- def constraint_mode_check(self):
- """Allows to connect constraint_mode_check input to the operator.
+ def constraint_mode_check(self) -> Input:
+ r"""Allows to connect constraint_mode_check input to the operator.
- If true, the orthogonality of the constraint
- modes are checked. default is false.
+ if true, the orthogonality of the constraint modes is checked. Default is false.
- Parameters
- ----------
- my_constraint_mode_check : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -806,392 +817,423 @@ def __init__(self, op: Operator):
self._outputs.append(self._dnzn)
@property
- def model_data(self):
- """Allows to get model_data output of the operator
+ def model_data(self) -> Output:
+ r"""Allows to get model_data output of the operator
+
+ data describing the finite element model
Returns
- ----------
- my_model_data : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_model_data = op.outputs.model_data()
- """ # noqa: E501
+ """
return self._model_data
@property
- def center_of_mass(self):
- """Allows to get center_of_mass output of the operator
+ def center_of_mass(self) -> Output:
+ r"""Allows to get center_of_mass output of the operator
+
+ center of mass of the body
Returns
- ----------
- my_center_of_mass : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_center_of_mass = op.outputs.center_of_mass()
- """ # noqa: E501
+ """
return self._center_of_mass
@property
- def inertia_relief(self):
- """Allows to get inertia_relief output of the operator
+ def inertia_relief(self) -> Output:
+ r"""Allows to get inertia_relief output of the operator
+
+ inertia matrix
Returns
- ----------
- my_inertia_relief : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_inertia_relief = op.outputs.inertia_relief()
- """ # noqa: E501
+ """
return self._inertia_relief
@property
- def model_size(self):
- """Allows to get model_size output of the operator
+ def model_size(self) -> Output:
+ r"""Allows to get model_size output of the operator
Returns
- ----------
- my_model_size : PropertyField
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_model_size = op.outputs.model_size()
- """ # noqa: E501
+ """
return self._model_size
@property
- def master_node_coordinates(self):
- """Allows to get master_node_coordinates output of the operator
+ def master_node_coordinates(self) -> Output:
+ r"""Allows to get master_node_coordinates output of the operator
Returns
- ----------
- my_master_node_coordinates :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_master_node_coordinates = op.outputs.master_node_coordinates()
- """ # noqa: E501
+ """
return self._master_node_coordinates
@property
- def v_trsf(self):
- """Allows to get v_trsf output of the operator
+ def v_trsf(self) -> Output:
+ r"""Allows to get v_trsf output of the operator
+
+ translational and rotational shape functions
Returns
- ----------
- my_v_trsf :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_v_trsf = op.outputs.v_trsf()
- """ # noqa: E501
+ """
return self._v_trsf
@property
- def k_mat(self):
- """Allows to get k_mat output of the operator
+ def k_mat(self) -> Output:
+ r"""Allows to get k_mat output of the operator
Returns
- ----------
- my_k_mat : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_k_mat = op.outputs.k_mat()
- """ # noqa: E501
+ """
return self._k_mat
@property
- def mass_mat(self):
- """Allows to get mass_mat output of the operator
+ def mass_mat(self) -> Output:
+ r"""Allows to get mass_mat output of the operator
Returns
- ----------
- my_mass_mat : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_mass_mat = op.outputs.mass_mat()
- """ # noqa: E501
+ """
return self._mass_mat
@property
- def c_mat(self):
- """Allows to get c_mat output of the operator
+ def c_mat(self) -> Output:
+ r"""Allows to get c_mat output of the operator
Returns
- ----------
- my_c_mat : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_c_mat = op.outputs.c_mat()
- """ # noqa: E501
+ """
return self._c_mat
@property
- def rhs(self):
- """Allows to get rhs output of the operator
+ def rhs(self) -> Output:
+ r"""Allows to get rhs output of the operator
Returns
- ----------
- my_rhs : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_rhs = op.outputs.rhs()
- """ # noqa: E501
+ """
return self._rhs
@property
- def dn(self):
- """Allows to get dn output of the operator
+ def dn(self) -> Output:
+ r"""Allows to get dn output of the operator
Returns
- ----------
- my_dn :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dn = op.outputs.dn()
- """ # noqa: E501
+ """
return self._dn
@property
- def dr_cross_n(self):
- """Allows to get dr_cross_n output of the operator
+ def dr_cross_n(self) -> Output:
+ r"""Allows to get dr_cross_n output of the operator
Returns
- ----------
- my_dr_cross_n :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dr_cross_n = op.outputs.dr_cross_n()
- """ # noqa: E501
+ """
return self._dr_cross_n
@property
- def drn(self):
- """Allows to get drn output of the operator
+ def drn(self) -> Output:
+ r"""Allows to get drn output of the operator
Returns
- ----------
- my_drn :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_drn = op.outputs.drn()
- """ # noqa: E501
+ """
return self._drn
@property
- def dn_cross_n(self):
- """Allows to get dn_cross_n output of the operator
+ def dn_cross_n(self) -> Output:
+ r"""Allows to get dn_cross_n output of the operator
Returns
- ----------
- my_dn_cross_n :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dn_cross_n = op.outputs.dn_cross_n()
- """ # noqa: E501
+ """
return self._dn_cross_n
@property
- def dnx_y(self):
- """Allows to get dnx_y output of the operator
+ def dnx_y(self) -> Output:
+ r"""Allows to get dnx_y output of the operator
Returns
- ----------
- my_dnx_y :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dnx_y = op.outputs.dnx_y()
- """ # noqa: E501
+ """
return self._dnx_y
@property
- def dny_y(self):
- """Allows to get dny_y output of the operator
+ def dny_y(self) -> Output:
+ r"""Allows to get dny_y output of the operator
Returns
- ----------
- my_dny_y :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dny_y = op.outputs.dny_y()
- """ # noqa: E501
+ """
return self._dny_y
@property
- def dnz_y(self):
- """Allows to get dnz_y output of the operator
+ def dnz_y(self) -> Output:
+ r"""Allows to get dnz_y output of the operator
Returns
- ----------
- my_dnz_y :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dnz_y = op.outputs.dnz_y()
- """ # noqa: E501
+ """
return self._dnz_y
@property
- def dyx_n(self):
- """Allows to get dyx_n output of the operator
+ def dyx_n(self) -> Output:
+ r"""Allows to get dyx_n output of the operator
Returns
- ----------
- my_dyx_n :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dyx_n = op.outputs.dyx_n()
- """ # noqa: E501
+ """
return self._dyx_n
@property
- def dyy_n(self):
- """Allows to get dyy_n output of the operator
+ def dyy_n(self) -> Output:
+ r"""Allows to get dyy_n output of the operator
Returns
- ----------
- my_dyy_n :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dyy_n = op.outputs.dyy_n()
- """ # noqa: E501
+ """
return self._dyy_n
@property
- def dyz_n(self):
- """Allows to get dyz_n output of the operator
+ def dyz_n(self) -> Output:
+ r"""Allows to get dyz_n output of the operator
Returns
- ----------
- my_dyz_n :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dyz_n = op.outputs.dyz_n()
- """ # noqa: E501
+ """
return self._dyz_n
@property
- def dnxn(self):
- """Allows to get dnxn output of the operator
+ def dnxn(self) -> Output:
+ r"""Allows to get dnxn output of the operator
Returns
- ----------
- my_dnxn :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dnxn = op.outputs.dnxn()
- """ # noqa: E501
+ """
return self._dnxn
@property
- def dnyn(self):
- """Allows to get dnyn output of the operator
+ def dnyn(self) -> Output:
+ r"""Allows to get dnyn output of the operator
Returns
- ----------
- my_dnyn :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dnyn = op.outputs.dnyn()
- """ # noqa: E501
+ """
return self._dnyn
@property
- def dnzn(self):
- """Allows to get dnzn output of the operator
+ def dnzn(self) -> Output:
+ r"""Allows to get dnzn output of the operator
Returns
- ----------
- my_dnzn :
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_invariant_terms_rbd()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_dnzn = op.outputs.dnzn()
- """ # noqa: E501
+ """
return self._dnzn
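
A similar sketch for compute_invariant_terms_rbd, again assuming the pin names documented above; the my_* variables are placeholder inputs, and constraint_mode_check is the optional pin 9.

from ansys.dpf import core as dpf

op = dpf.operators.result.compute_invariant_terms_rbd()
op.inputs.rom_matrices.connect(my_rom_matrices)
op.inputs.mode_shapes.connect(my_mode_shapes)
op.inputs.lumped_mass.connect(my_lumped_mass)
op.inputs.model_data.connect(my_model_data)
op.inputs.center_of_mass.connect(my_center_of_mass)
op.inputs.inertia_relief.connect(my_inertia_relief)        # inertia matrix
op.inputs.model_size.connect(my_model_size)                # float
op.inputs.field_coordinates.connect(my_field_coordinates)
op.inputs.nod.connect(my_nod)
op.inputs.constraint_mode_check.connect(True)              # optional: check orthogonality of the constraint modes

center_of_mass = op.outputs.center_of_mass()   # Field
inertia_relief = op.outputs.inertia_relief()   # Field
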
diff --git a/src/ansys/dpf/core/operators/result/compute_stress.py b/src/ansys/dpf/core/operators/result/compute_stress.py
index d072c0846d6..6b7abf113ca 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress.py
@@ -4,42 +4,39 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
+ r"""Computes the stress from an elastic strain field. compute_total_strain
limitations are applicable for stress computation
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -98,9 +95,10 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation"""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -108,38 +106,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -147,15 +138,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -164,29 +154,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStress
+ inputs:
+ An instance of InputsComputeStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStress
+ outputs:
+ An instance of OutputsComputeStress.
"""
return super().outputs
@@ -225,15 +222,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -246,16 +243,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -268,16 +264,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -290,15 +285,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -311,15 +306,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -350,18 +345,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
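
A short sketch for compute_stress with the pins documented above; my_elastic_strain and my_data_sources are placeholders, and the "Nodal" string passed to requested_location is an assumption about the accepted averaging values.

from ansys.dpf import core as dpf

op = dpf.operators.result.compute_stress()
op.inputs.strain.connect(my_elastic_strain)      # Field or FieldsContainer with the element nodal elastic strain
op.inputs.data_sources.connect(my_data_sources)  # needed for mesh and material ids (or connect streams_container)
op.inputs.requested_location.connect("Nodal")    # optional: average the Elemental Nodal result

stress_fc = op.outputs.fields_container()        # FieldsContainer (elemental nodal unless averaged)
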
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_1.py b/src/ansys/dpf/core/operators/result/compute_stress_1.py
index 3eeac1257b0..b25825387f5 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_1.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_1.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_1(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the 1st
- principal component.
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+ limitations are applicable for stress computation. Get the 1st principal
+ component.
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- 1st principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation. Get the 1st principal
+component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStress1:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStress1
+ inputs:
+ An instance of InputsComputeStress1.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStress1:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStress1
+ outputs:
+ An instance of OutputsComputeStress1.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_1()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
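For compute_stress_1 the same wiring yields the first principal stress. The sketch below is illustrative only: the result file path is hypothetical, and it assumes the file exposes an elemental nodal elastic strain result through the Model API:

>>> from ansys.dpf import core as dpf
>>> model = dpf.Model("path/to/file.rst")  # hypothetical result file
>>> # Elastic strain evaluated as an elemental nodal fields container
>>> strain_fc = model.results.elastic_strain().outputs.fields_container()
>>> op = dpf.operators.result.compute_stress_1()
>>> op.inputs.strain.connect(strain_fc)
>>> # data_sources (pin 4) supplies mesh and material ids
>>> op.inputs.data_sources.connect(model.metadata.data_sources)
>>> s1_fc = op.outputs.fields_container()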
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_2.py b/src/ansys/dpf/core/operators/result/compute_stress_2.py
index f3ddb231233..41fdc2ff86e 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_2.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_2.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_2(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the 2nd
- principal component.
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+ limitations are applicable for stress computation Get the 2nd principal
+ component.
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- 2nd principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation Get the 2nd principal
+component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStress2:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStress2
+ inputs:
+ An instance of InputsComputeStress2.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStress2:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStress2
+ outputs:
+ An instance of OutputsComputeStress2.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_2()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
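The constructor shown in this file also accepts the pins as keyword arguments, so the same wiring can be done in one call. A sketch, with a hypothetical result file path and a hypothetical my_elastic_strain input standing in for real data:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/file.rst")  # hypothetical result file
>>> # Pins passed directly to the generated constructor
>>> op = dpf.operators.result.compute_stress_2(
...     strain=my_elastic_strain,
...     data_sources=ds,
... )
>>> s2_fc = op.outputs.fields_container()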
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_3.py b/src/ansys/dpf/core/operators/result/compute_stress_3.py
index 6c151943559..98edba8b928 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_3.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_3.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_3(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the 3rd
- principal component.
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+ limitations are applicable for stress computation Get the 3rd principal
+ component.
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- 3rd principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation Get the 3rd principal
+component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_3", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStress3:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStress3
+ inputs:
+ An instance of InputsComputeStress3.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStress3:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStress3
+ outputs:
+ An instance of OutputsComputeStress3.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_3()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
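The requested_location pin (9) documented above averages the elemental nodal result before it is returned. A small sketch, assuming a hypothetical my_elastic_strain input, that requests nodal averaging:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_3()
>>> op.inputs.strain.connect(my_elastic_strain)
>>> # Pin 9: average the elemental nodal result to the nodes
>>> op.inputs.requested_location.connect(dpf.locations.nodal)
>>> s3_nodal_fc = op.outputs.fields_container()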
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_X.py b/src/ansys/dpf/core/operators/result/compute_stress_X.py
index c8abfaa6cb1..6ad08dcc7ad 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_X.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_X.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_X(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the XX
- normal component (00 component).
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+ limitations are applicable for stress computation Get the XX normal
+ component (00 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- XX normal component (00 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation Get the XX normal
+component (00 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_X", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressX
+ inputs:
+ An instance of InputsComputeStressX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressX
+ outputs:
+ An instance of OutputsComputeStressX.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
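default_config, as retyped above, returns a Config that can be inspected or adjusted and handed back to the operator. A sketch of that round trip, assuming the generated constructor accepts a config keyword as the other autogenerated operators do, with a hypothetical my_elastic_strain input:

>>> from ansys.dpf import core as dpf
>>> # Start from the operator's default configuration
>>> cfg = dpf.operators.result.compute_stress_X.default_config()
>>> # Hand the (possibly modified) config back when instantiating
>>> op = dpf.operators.result.compute_stress_X(config=cfg)
>>> op.inputs.strain.connect(my_elastic_strain)
>>> sxx_fc = op.outputs.fields_container()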
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_XY.py b/src/ansys/dpf/core/operators/result/compute_stress_XY.py
index dbd6fd3766e..eaddc657dd0 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_XY.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_XY.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_XY(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
+ r"""Computes the stress from an elastic strain field. compute_total_strain
limitations are applicable for stress computation Get the XY shear
component (01 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- XY shear component (01 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation Get the XY shear
+component (01 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_XY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressXy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressXy
+ inputs:
+ An instance of InputsComputeStressXy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressXy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressXy
+ outputs:
+ An instance of OutputsComputeStressXy.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_XY()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
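Pin 1 (scoping) restricts the computation to a subset of elements. A sketch with hypothetical element IDs and a hypothetical my_elastic_strain input:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_XY()
>>> op.inputs.strain.connect(my_elastic_strain)
>>> # Pin 1: compute only on these elements (IDs are placeholders)
>>> my_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.elemental)
>>> op.inputs.scoping.connect(my_scoping)
>>> sxy_fc = op.outputs.fields_container()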
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_XZ.py b/src/ansys/dpf/core/operators/result/compute_stress_XZ.py
index eb9e8b6820c..e74d9bb639b 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_XZ.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_XZ.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_XZ(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
+ r"""Computes the stress from an elastic strain field. compute_total_strain
limitations are applicable for stress computation Get the XZ shear
component (02 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- XZ shear component (02 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation Get the XZ shear
+component (02 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a data_sources have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+ document=r"""Needed to get mesh and material ids. Optional if a streams_container have been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+ document=r"""Field/or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_XZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressXz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressXz
+ inputs:
+ An instance of InputsComputeStressXz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressXz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressXz
+ outputs:
+ An instance of OutputsComputeStressXz.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ Field/or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_XZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
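Because the input pins accept operator output pins as well as concrete data, the strain can come straight from an upstream elastic strain operator. A sketch, with a hypothetical result file path:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("path/to/file.rst")  # hypothetical result file
>>> strain_op = dpf.operators.result.elastic_strain(data_sources=ds)
>>> op = dpf.operators.result.compute_stress_XZ()
>>> # Chain operators by connecting an output pin to an input pin
>>> op.inputs.strain.connect(strain_op.outputs.fields_container)
>>> op.inputs.data_sources.connect(ds)
>>> sxz_fc = op.outputs.fields_container()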
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_Y.py b/src/ansys/dpf/core/operators/result/compute_stress_Y.py
index 090c826f2c4..d895195f43e 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_Y.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_Y.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_Y(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the YY
- normal component (11 component).
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+ limitations are applicable for stress computation Get the YY normal
+ component (11 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Needed to get mesh and material ids. Optional if a data_sources have been connected.
+ data_sources: DataSources, optional
+ Needed to get mesh and material ids. Optional if a streams_container have been connected.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ strain: FieldsContainer or Field
+ Field/or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- YY normal component (11 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation. Get the YY normal
+component (11 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a data_sources has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a streams_container has been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+                    document=r"""Field or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_Y", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressY
+ inputs:
+ An instance of InputsComputeStressY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressY
+ outputs:
+ An instance of OutputsComputeStressY.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+        Field or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
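
A minimal sketch of the keyword-argument construction implied by the Parameters section above; ``my_elastic_strain`` and ``my_scoping`` are hypothetical placeholders, and "Nodal" is assumed to be an accepted requested_location string:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_Y(
...     strain=my_elastic_strain,    # hypothetical elastic strain FieldsContainer or Field
...     scoping=my_scoping,          # hypothetical element Scoping
...     requested_location="Nodal",  # assumed location; averages the Elemental Nodal result
... )
>>> stress_yy = op.outputs.fields_container()
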
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_YZ.py b/src/ansys/dpf/core/operators/result/compute_stress_YZ.py
index 6a840d88ef1..06bcebb5284 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_YZ.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_YZ.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_YZ(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
+ r"""Computes the stress from an elastic strain field. compute_total_strain
limitations are applicable for stress computation Get the YZ shear
component (12 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
+    data_sources: DataSources, optional
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
+    requested_location: str, optional
+        Average the Elemental Nodal result to the requested location.
+    strain: FieldsContainer or Field
+        Field or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- YZ shear component (12 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation. Get the YZ shear
+component (12 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a data_sources has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a streams_container has been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+                    document=r"""Field or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_YZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressYz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressYz
+ inputs:
+ An instance of InputsComputeStressYz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressYz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressYz
+ outputs:
+ An instance of OutputsComputeStressYz.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+        Field or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_YZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
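
A minimal sketch of connecting the required strain pin while letting data_sources supply mesh and material ids, as the pin documents above describe; "file.rst" and ``my_elastic_strain`` are placeholders:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("file.rst")             # placeholder result file
>>> op = dpf.operators.result.compute_stress_YZ()
>>> op.inputs.data_sources.connect(ds)           # supplies mesh and material ids
>>> op.inputs.strain.connect(my_elastic_strain)  # hypothetical elastic strain input (required pin)
>>> stress_yz = op.outputs.fields_container()
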
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_Z.py b/src/ansys/dpf/core/operators/result/compute_stress_Z.py
index 9f9f57b40bf..98c317e7239 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_Z.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_Z.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_Z(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the ZZ
- normal component (22 component).
+ r"""Computes the stress from an elastic strain field. compute_total_strain
+    limitations are applicable for stress computation. Get the ZZ normal
+ component (22 component).
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
+    data_sources: DataSources, optional
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
+    requested_location: str, optional
+        Average the Elemental Nodal result to the requested location.
+    strain: FieldsContainer or Field
+        Field or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computation Get the
- ZZ normal component (22 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation. Get the ZZ normal
+component (22 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a data_sources has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a streams_container has been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+                    document=r"""Field or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_Z", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressZ
+ inputs:
+ An instance of InputsComputeStressZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressZ
+ outputs:
+ An instance of OutputsComputeStressZ.
"""
return super().outputs
@@ -231,15 +228,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -252,16 +249,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -274,16 +270,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,15 +291,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +312,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+        Field or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -356,18 +351,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
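
A minimal sketch of the annotated default_config shown above; it assumes the generated operator accepts a ``config`` argument at construction, as the other keyword arguments suggest:

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.compute_stress_Z.default_config()  # Config instance that can be modified
>>> op = dpf.operators.result.compute_stress_Z(config=config)        # assumed config keyword argument
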
diff --git a/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py b/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py
index 14e55dadaf0..d33bef7ee27 100644
--- a/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py
+++ b/src/ansys/dpf/core/operators/result/compute_stress_von_mises.py
@@ -4,43 +4,40 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_stress_von_mises(Operator):
- """Computes the stress from an elastic strain field. compute_total_strain
+ r"""Computes the stress from an elastic strain field. compute_total_strain
limitations are applicable for stress computationGet the Von Mises
equivalent stress.
+
Parameters
----------
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
- data_sources : DataSources, optional
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- strain : FieldsContainer or Field
- Field/or fields container containing only the
- elastic strain field (element nodal).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
+    data_sources: DataSources, optional
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
+    requested_location: str, optional
+        Average the Elemental Nodal result to the requested location.
+    strain: FieldsContainer or Field
+        Field or fields container containing only the elastic strain field (element nodal).
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -99,10 +96,11 @@ def __init__(
self.inputs.strain.connect(strain)
@staticmethod
- def _spec():
- description = """Computes the stress from an elastic strain field. compute_total_strain
- limitations are applicable for stress computationGet the
- Von Mises equivalent stress."""
+ def _spec() -> Specification:
+ description = r"""Computes the stress from an elastic strain field. compute_total_strain
+limitations are applicable for stress computation. Get the Von Mises
+equivalent stress.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -110,38 +108,31 @@ def _spec():
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a data_sources has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=True,
- document="""Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.""",
+                    document=r"""Needed to get mesh and material ids. Optional if a streams_container has been connected.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="strain",
type_names=["fields_container", "field"],
optional=False,
- document="""Field/or fields container containing only the
- elastic strain field (element nodal).""",
+                    document=r"""Field or fields container containing only the elastic strain field (element nodal).""",
),
},
map_output_pin_spec={
@@ -149,15 +140,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -166,29 +156,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_stress_von_mises", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeStressVonMises:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeStressVonMises
+ inputs:
+ An instance of InputsComputeStressVonMises.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeStressVonMises:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeStressVonMises
+ outputs:
+ An instance of OutputsComputeStressVonMises.
"""
return super().outputs
@@ -233,15 +230,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._strain)
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -254,16 +251,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Needed to get mesh and material ids. optional
- if a data_sources have been
- connected.
+        Needed to get mesh and material ids. Optional if a data_sources has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -276,16 +272,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Needed to get mesh and material ids. optional
- if a streams_container have been
- connected.
+        Needed to get mesh and material ids. Optional if a streams_container has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -298,15 +293,15 @@ def data_sources(self):
return self._data_sources
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -319,15 +314,15 @@ def requested_location(self):
return self._requested_location
@property
- def strain(self):
- """Allows to connect strain input to the operator.
+ def strain(self) -> Input:
+ r"""Allows to connect strain input to the operator.
- Field/or fields container containing only the
- elastic strain field (element nodal).
+        Field or fields container containing only the elastic strain field (element nodal).
- Parameters
- ----------
- my_strain : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -360,18 +355,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_stress_von_mises()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
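
A minimal sketch of chaining operators through the typed Input/Output handles introduced above; it assumes the standard elastic_strain result operator is available and "file.rst" is a placeholder:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("file.rst")
>>> strain_op = dpf.operators.result.elastic_strain(data_sources=ds)  # assumed standard result operator
>>> vm_op = dpf.operators.result.compute_stress_von_mises()
>>> vm_op.inputs.strain.connect(strain_op.outputs.fields_container)   # connect the Output handle directly
>>> vm_op.inputs.data_sources.connect(ds)                             # mesh and material ids
>>> vm_stress = vm_op.outputs.fields_container()
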
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain.py b/src/ansys/dpf/core/operators/result/compute_total_strain.py
index 045726f2f1f..9c3efa83d46 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain.py
@@ -4,85 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+    material properties is allowed per element for isotropic and
+    orthotropic elasticity. Material nonlinearity is not supported. Only
+    linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+        Field or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -165,17 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -190,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+                    document=r"""Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+                    document=r"""Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+                    document=r"""The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+                    document=r"""Field or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -281,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrain
+ inputs:
+ An instance of InputsComputeTotalStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrain
+ outputs:
+ An instance of OutputsComputeTotalStrain.
"""
return super().outputs
@@ -383,30 +323,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -419,15 +344,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,17 +365,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -463,18 +386,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -487,15 +407,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -508,16 +428,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -530,18 +449,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,15 +470,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -575,17 +491,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+        Field or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -618,18 +532,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
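
A minimal sketch of the time_scoping pin described above, requesting a single time/freq set id; the file path and set id are placeholders and "Nodal" is an assumed requested_location value:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("file.rst")
>>> op = dpf.operators.result.compute_total_strain(
...     data_sources=ds,
...     time_scoping=1,              # a single time/freq set id; doubles, Scoping, or Field are also accepted
...     requested_location="Nodal",  # assumed location; averages the Elemental Nodal result
... )
>>> total_strain = op.outputs.fields_container()
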
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_1.py b/src/ansys/dpf/core/operators/result/compute_total_strain_1.py
index 646b0f98b7f..65d4b8faef7 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_1.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_1.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_1(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the 1st principal component.
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+    material properties is allowed per element for isotropic and
+    orthotropic elasticity. Material nonlinearity is not supported. Only
+    linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the 1st principal component.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+        Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- 1st principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the 1st principal component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+                    document=r"""Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+                    document=r"""Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+                    document=r"""Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+                    document=r"""The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+                    document=r"""Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrain1:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrain1
+ inputs:
+ An instance of InputsComputeTotalStrain1.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrain1:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrain1
+ outputs:
+ An instance of OutputsComputeTotalStrain1.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+        Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_1()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
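
A hedged sketch for compute_total_strain_1 (first principal component), combining the ``default_config`` helper and the ``displacement`` pin described above. The upstream displacement operator and the file path are illustrative assumptions; per the docstring, feeding a displacement field directly removes the need for ``data_sources`` only when that field carries a mesh support:

>>> from ansys.dpf import core as dpf
>>> # The default Config can be inspected (and, under the usual Operator
>>> # pattern, tweaked) before the operator is instantiated.
>>> config = dpf.operators.result.compute_total_strain_1.default_config()
>>> # Build a nodal displacement fields container from a placeholder result file.
>>> disp_op = dpf.operators.result.displacement()
>>> disp_op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))
>>> # Connect the displacement output straight into the strain operator.
>>> op = dpf.operators.result.compute_total_strain_1()
>>> op.inputs.displacement.connect(disp_op.outputs.fields_container)
>>> principal_1 = op.outputs.fields_container()
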
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_2.py b/src/ansys/dpf/core/operators/result/compute_total_strain_2.py
index 9b9e1ea9846..28065915321 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_2.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_2.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_2(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the 2nd principal component.
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+    material properties is allowed per element for isotropic and
+    orthotropic elasticity. Material nonlinearity is not supported. Only
+    linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the 2nd principal component.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+        Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- 2nd principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the 2nd principal component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+                    document=r"""Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+                    document=r"""Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+                    document=r"""Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+                    document=r"""The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+                    document=r"""Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrain2:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrain2
+ inputs:
+ An instance of InputsComputeTotalStrain2.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrain2:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrain2
+ outputs:
+ An instance of OutputsComputeTotalStrain2.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+        Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_2()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
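
A last sketch, for compute_total_strain_2 (second principal component), showing the ``time_scoping`` pin fed with a time/freq set id as an int, one of the accepted types listed above. The set id and the file path are placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_2()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))
>>> # Evaluate only time/freq set 1; a Scoping or a Field can be used instead,
>>> # as the time_scoping docstring explains.
>>> op.inputs.time_scoping.connect(1)
>>> principal_2 = op.outputs.fields_container()
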
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_3.py b/src/ansys/dpf/core/operators/result/compute_total_strain_3.py
index a347eae637e..5a1665cd43f 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_3.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_3.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_3(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the 3rd principal component.
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+    material properties is allowed per element for isotropic and
+    orthotropic elasticity. Material nonlinearity is not supported. Only
+    linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the 3rd principal component.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+        The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+        Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- 3rd principal component."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the 3rd principal component.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+                    document=r"""Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+                    document=r"""Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+                    document=r"""Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+                    document=r"""The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+                    document=r"""Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_3", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrain3:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrain3
+ inputs:
+ An instance of InputsComputeTotalStrain3.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrain3:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrain3
+ outputs:
+ An instance of OutputsComputeTotalStrain3.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than the available time/freqs in the result files. Only used if no displacement input is given (it is then applied on the displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+        Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+        Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_3()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
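The hunks above document that compute_total_strain_3 needs either a data_sources (or streams_container) pointing at a result file, or a displacement field, before its fields_container output can be evaluated. As a minimal usage sketch of that pin layout, assuming a placeholder result-file path:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_3()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.rst"))  # placeholder path
>>> op.inputs.requested_location.connect("Nodal")  # average elemental nodal data to nodes
>>> total_strain = op.outputs.fields_container()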
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_X.py b/src/ansys/dpf/core/operators/result/compute_total_strain_X.py
index c6a7057c5e1..a540fdcaffc 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_X.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_X.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_X(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the XX normal component (00 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the XX normal component (00 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- XX normal component (00 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the XX normal component (00 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_X", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainX
+ inputs:
+ An instance of InputsComputeTotalStrainX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainX
+ outputs:
+ An instance of OutputsComputeTotalStrainX.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
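As documented above, compute_total_strain_X extracts the XX (00) normal strain component, and its time_scoping pin accepts ints, floats, a Scoping, or a Field. A sketch of requesting a single time/freq set, again with a placeholder result file:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_X()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.rst"))  # placeholder path
>>> op.inputs.time_scoping.connect(2)  # time/freq set id passed as an int
>>> xx_strain = op.outputs.fields_container()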
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py b/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py
index bc030688902..ab932369888 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_XY.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_XY(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the XY shear component (01 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the XY shear component (01 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- XY shear component (01 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the XY shear component (01 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_XY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainXy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainXy
+ inputs:
+ An instance of InputsComputeTotalStrainXy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainXy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainXy
+ outputs:
+ An instance of OutputsComputeTotalStrainXy.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_XY()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
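These generated classes also accept their pins as constructor keyword arguments (each __init__ connects the keyword to the matching input), so the XY shear variant can be chained directly to a displacement operator instead of reading displacements from the data_sources. A sketch under the same placeholder-path assumption:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"path/to/model.rst")  # placeholder result file
>>> disp_op = dpf.operators.result.displacement(data_sources=ds)
>>> op = dpf.operators.result.compute_total_strain_XY(
...     displacement=disp_op.outputs.fields_container,  # connect the upstream output pin
...     requested_location="Nodal",
... )
>>> xy_strain = op.outputs.fields_container()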
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py b/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py
index 00038b27e64..304e435c883 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_XZ.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_XZ(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the XZ shear component (02 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the XZ shear component (02 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ Time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- XZ shear component (02 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the XZ shear component (02 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_XZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainXz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainXz
+ inputs:
+ An instance of InputsComputeTotalStrainXz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainXz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainXz
+ outputs:
+ An instance of OutputsComputeTotalStrainXz.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_XZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
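A minimal usage sketch for the compute_total_strain_XZ operator documented above; the result file name "model.rst" and the "Nodal" location are placeholders, while the operator, pin, and output names are taken from the spec in this file:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("model.rst")              # hypothetical result file
>>> op = dpf.operators.result.compute_total_strain_XZ()
>>> op.inputs.data_sources.connect(ds)             # pin 4 (required)
>>> op.inputs.requested_location.connect("Nodal")  # pin 9, average the Elemental Nodal result
>>> xz_strain = op.outputs.fields_container()      # evaluate and get the FieldsContainer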
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py b/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py
index d564be12997..e487d79026e 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_Y.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_Y(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the YY normal component (11 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the YY normal component (11 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- YY normal component (11 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the YY normal component (11 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_Y", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainY
+ inputs:
+ An instance of InputsComputeTotalStrainY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainY
+ outputs:
+ An instance of OutputsComputeTotalStrainY.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
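Because the generated __init__ of compute_total_strain_Y connects any keyword arguments to the matching pins, the operator can also be configured at construction time. A minimal sketch under the same assumptions (placeholder file name; the set id 1 is illustrative only):

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("model.rst")  # hypothetical result file
>>> op = dpf.operators.result.compute_total_strain_Y(
...     data_sources=ds,   # pin 4 (required)
...     time_scoping=1,    # pin 0, a time/freq set id passed as an int
... )
>>> yy_strain = op.outputs.fields_container()  # elemental nodal YY strain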
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py b/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py
index 84909ac8b85..3eb4c04bc6b 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_YZ.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_YZ(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the YZ shear component (12 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the YZ shear component (12 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- YZ shear component (12 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the YZ shear component (12 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources have been connected. Required if no displacement input have been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container have been connected, or if the displacement's field has a mesh support. Required if no displacement input have been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container have been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field/or fields container containing only the displacement field (nodal). If none specified, read displacements from result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_YZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainYz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainYz
+ inputs:
+ An instance of InputsComputeTotalStrainYz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainYz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainYz
+ outputs:
+ An instance of OutputsComputeTotalStrainYz.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_YZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
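The integer pins of compute_total_strain_YZ are connected the same way as any other input; the values below are illustrative only, and their meaning is the one given in the pin documents above:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_YZ()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # placeholder path
>>> op.inputs.extrapolate.connect(1)  # pin 5: extrapolate from integration points to nodes
>>> op.inputs.nonlinear.connect(1)    # pin 6: 1 = large strain, 2 = hyperelasticity
>>> yz_strain = op.outputs.fields_container()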
diff --git a/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py b/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py
index d20fd5e02bd..559ebad3f82 100644
--- a/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py
+++ b/src/ansys/dpf/core/operators/result/compute_total_strain_Z.py
@@ -4,86 +4,54 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class compute_total_strain_Z(Operator):
- """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are supported.
- Layered elements are not supported. Thermal strains are not
- supported. Only one value of material properties are allowed per
- element for isotropic and orthotropic elasticity. Material
- nonlinearity is not supported Only linear analysis are supported
- without On Demand Expansion. All coordinates are global
- coordinates. Euler Angles need to be included in the database.
- Get the ZZ normal component (22 component).
+ r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+ Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+ (Full Integration) & SOLID187 elements are supported. Layered elements
+ are not supported. Thermal strains are not supported. Only one value of
+ material properties is allowed per element for isotropic and
+ orthotropic elasticity. Material nonlinearity is not supported. Only
+ linear analyses are supported without On Demand Expansion. All
+ coordinates are global coordinates. Euler Angles need to be included in
+ the database. Get the ZZ normal component (22 component).
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
- scoping : Scoping, optional
- The element scoping on which the result is
- computed.
- streams_container : StreamsContainer, optional
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
- data_sources : DataSources
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
- extrapolate : int, optional
- Whether to extrapolate the data from the
- integration points to the nodes.
- nonlinear : int, optional
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
- abstract_meshed_region : MeshedRegion, optional
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
- requested_location : str, optional
- Average the elemental nodal result to the
- requested location.
- displacement : FieldsContainer or Field, optional
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files, and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+ scoping: Scoping, optional
+ The element scoping on which the result is computed.
+ streams_container: StreamsContainer, optional
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
+ data_sources: DataSources
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
+ extrapolate: int, optional
+ Whether to extrapolate the data from the integration points to the nodes.
+ nonlinear: int, optional
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
+ abstract_meshed_region: MeshedRegion, optional
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
+ requested_location: str, optional
+ Average the Elemental Nodal result to the requested location.
+ displacement: FieldsContainer or Field, optional
+ Field or fields container containing only the displacement field (nodal). If none is specified, displacements are read from the result file using the data_sources.
Returns
-------
- fields_container : FieldsContainer
- The computed result fields container
- (elemental nodal).
+ fields_container: FieldsContainer
+ The computed result fields container (elemental nodal).
Examples
--------
@@ -166,18 +134,17 @@ def __init__(
self.inputs.displacement.connect(displacement)
@staticmethod
- def _spec():
- description = """Computes the strain from a displacement field. Only SOLID185 (B-Bar,
- Simplified Enhanced Strain, Enhanced Strain formulations),
- SOLID186 (Full Integration) & SOLID187 elements are
- supported. Layered elements are not supported. Thermal
- strains are not supported. Only one value of material
- properties are allowed per element for isotropic and
- orthotropic elasticity. Material nonlinearity is not
- supported Only linear analysis are supported without On
- Demand Expansion. All coordinates are global coordinates.
- Euler Angles need to be included in the database. Get the
- ZZ normal component (22 component)."""
+ def _spec() -> Specification:
+ description = r"""Computes the strain from a displacement field. Only SOLID185 (B-Bar,
+Simplified Enhanced Strain, Enhanced Strain formulations), SOLID186
+(Full Integration) & SOLID187 elements are supported. Layered elements
+are not supported. Thermal strains are not supported. Only one value of
+material properties is allowed per element for isotropic and
+orthotropic elasticity. Material nonlinearity is not supported. Only
+linear analyses are supported without On Demand Expansion. All
+coordinates are global coordinates. Euler Angles need to be included in
+the database. Get the ZZ normal component (22 component).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,90 +159,55 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).""",
),
1: PinSpecification(
name="scoping",
type_names=["scoping"],
optional=True,
- document="""The element scoping on which the result is
- computed.""",
+ document=r"""The element scoping on which the result is computed.""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.""",
+ document=r"""Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.""",
+ document=r"""Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.""",
),
5: PinSpecification(
name="extrapolate",
type_names=["int32"],
optional=True,
- document="""Whether to extrapolate the data from the
- integration points to the nodes.""",
+ document=r"""Whether to extrapolate the data from the integration points to the nodes.""",
),
6: PinSpecification(
name="nonlinear",
type_names=["int32"],
optional=True,
- document="""Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).""",
+ document=r"""Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).""",
),
7: PinSpecification(
name="abstract_meshed_region",
type_names=["abstract_meshed_region"],
optional=True,
- document="""The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.""",
+ document=r"""The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Average the elemental nodal result to the
- requested location.""",
+ document=r"""Average the Elemental Nodal result to the requested location.""",
),
10: PinSpecification(
name="displacement",
type_names=["fields_container", "field"],
optional=True,
- document="""Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.""",
+ document=r"""Field or fields container containing only the displacement field (nodal). If none is specified, read displacements from the result file using the data_sources.""",
),
},
map_output_pin_spec={
@@ -283,15 +215,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The computed result fields container
- (elemental nodal).""",
+ document=r"""The computed result fields container (elemental nodal).""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -300,29 +231,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="compute_total_strain_Z", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsComputeTotalStrainZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsComputeTotalStrainZ
+ inputs:
+ An instance of InputsComputeTotalStrainZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsComputeTotalStrainZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsComputeTotalStrainZ
+ outputs:
+ An instance of OutputsComputeTotalStrainZ.
"""
return super().outputs
@@ -391,30 +329,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._displacement)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.. will only be used if no
- displacement input is given (will be
- applied on displacement operator).
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files. Will only be used if no displacement input is given (will be applied on displacement operator).
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +350,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def scoping(self):
- """Allows to connect scoping input to the operator.
+ def scoping(self) -> Input:
+ r"""Allows to connect scoping input to the operator.
- The element scoping on which the result is
- computed.
+ The element scoping on which the result is computed.
- Parameters
- ----------
- my_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,17 +371,15 @@ def scoping(self):
return self._scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Optional if a mesh or a data_sources have
- been connected. required if no
- displacement input have been
- connected.
+ Optional if a mesh or a data_sources has been connected. Required if no displacement input has been connected.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,18 +392,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Optional if a mesh or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support. required if no displacement
- input have been connected.
+ Optional if a mesh or a streams_container has been connected, or if the displacement's field has a mesh support. Required if no displacement input has been connected.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,15 +413,15 @@ def data_sources(self):
return self._data_sources
@property
- def extrapolate(self):
- """Allows to connect extrapolate input to the operator.
+ def extrapolate(self) -> Input:
+ r"""Allows to connect extrapolate input to the operator.
- Whether to extrapolate the data from the
- integration points to the nodes.
+ Whether to extrapolate the data from the integration points to the nodes.
- Parameters
- ----------
- my_extrapolate : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,16 +434,15 @@ def extrapolate(self):
return self._extrapolate
@property
- def nonlinear(self):
- """Allows to connect nonlinear input to the operator.
+ def nonlinear(self) -> Input:
+ r"""Allows to connect nonlinear input to the operator.
- Whether to use nonlinear geometry or
- nonlinear material (1 = large strain,
- 2 = hyperelasticity).
+ Whether to use nonlinear geometry or nonlinear material (1 = large strain, 2 = hyperelasticity).
- Parameters
- ----------
- my_nonlinear : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,18 +455,15 @@ def nonlinear(self):
return self._nonlinear
@property
- def abstract_meshed_region(self):
- """Allows to connect abstract_meshed_region input to the operator.
+ def abstract_meshed_region(self) -> Input:
+ r"""Allows to connect abstract_meshed_region input to the operator.
- The underlying mesh. optional if a
- data_sources or a streams_container
- have been connected, or if the
- displacement's field has a mesh
- support.
+ The underlying mesh. Optional if a data_sources or a streams_container has been connected, or if the displacement's field has a mesh support.
- Parameters
- ----------
- my_abstract_meshed_region : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +476,15 @@ def abstract_meshed_region(self):
return self._abstract_meshed_region
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Average the elemental nodal result to the
- requested location.
+ Average the Elemental Nodal result to the requested location.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,17 +497,15 @@ def requested_location(self):
return self._requested_location
@property
- def displacement(self):
- """Allows to connect displacement input to the operator.
+ def displacement(self) -> Input:
+ r"""Allows to connect displacement input to the operator.
- Field/or fields container containing only the
- displacement field (nodal). if none
- specified, read displacements from
- result file using the data_sources.
+ Field or fields container containing only the displacement field (nodal). If none is specified, read displacements from the result file using the data_sources.
- Parameters
- ----------
- my_displacement : FieldsContainer or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,18 +538,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ The computed result fields container (elemental nodal).
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
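For reference, a minimal usage sketch for the compute_total_strain_Z operator shown above (not part of the generated file; the result file name "model.rst" and the "Nodal" averaging location are illustrative assumptions, and the connect/evaluate pattern follows the doctests in this patch):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.compute_total_strain_Z()
>>> data_sources = dpf.DataSources("model.rst")  # hypothetical result file path
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")  # average the elemental nodal strain to nodes
>>> fields_container = op.outputs.fields_container()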
diff --git a/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py b/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py
index e7226478452..79fe68d8310 100644
--- a/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py
+++ b/src/ansys/dpf/core/operators/result/contact_fluid_penetration_pressure.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_fluid_penetration_pressure(Operator):
- """Read/compute element actual applied fluid penetration pressure by
- calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element actual applied fluid penetration pressure by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element actual applied fluid penetration pressure by
- calling the readers defined by the datasources. Regarding
- the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element actual applied fluid penetration pressure by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_FRES", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactFluidPenetrationPressure:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactFluidPenetrationPressure
+ inputs:
+ An instance of InputsContactFluidPenetrationPressure.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactFluidPenetrationPressure:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactFluidPenetrationPressure
+ outputs:
+ An instance of OutputsContactFluidPenetrationPressure.
"""
return super().outputs
@@ -535,28 +442,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -569,24 +463,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -599,15 +484,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -620,15 +505,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -641,15 +526,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -662,15 +547,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -683,15 +568,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -704,15 +589,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -725,18 +610,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -749,15 +631,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -770,17 +652,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -793,15 +673,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -814,15 +694,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -835,20 +715,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -861,21 +736,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -908,18 +777,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_fluid_penetration_pressure()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
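Similarly, a brief usage sketch for the contact_fluid_penetration_pressure operator documented above (the file path and the integer time set id are assumptions for illustration, not values taken from the patch):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_fluid_penetration_pressure()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical result file
>>> op.inputs.time_scoping.connect(1)  # a single time/freq set id passed as an int
>>> op.inputs.requested_location.connect("Elemental")
>>> fields_container = op.outputs.fields_container()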
diff --git a/src/ansys/dpf/core/operators/result/contact_friction_stress.py b/src/ansys/dpf/core/operators/result/contact_friction_stress.py
index 869ea7da166..53c3f7df8c9 100644
--- a/src/ansys/dpf/core/operators/result/contact_friction_stress.py
+++ b/src/ansys/dpf/core/operators/result/contact_friction_stress.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_friction_stress(Operator):
- """Read/compute element contact friction stress by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
+ r"""Read/compute element contact friction stress by calling the readers
+ defined by the datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact friction stress by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact friction stress by calling the readers
+defined by the datasources. Regarding the requested location and the
+input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_SFRIC", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactFrictionStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactFrictionStress
+ inputs:
+ An instance of InputsContactFrictionStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactFrictionStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactFrictionStress
+ outputs:
+ An instance of OutputsContactFrictionStress.
"""
return super().outputs
@@ -531,28 +438,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,24 +459,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -595,15 +480,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -616,15 +501,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,15 +522,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -658,15 +543,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -679,15 +564,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -700,15 +585,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location: Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -721,18 +606,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -745,15 +627,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -766,17 +648,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -789,15 +669,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -810,15 +690,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -831,20 +711,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -857,21 +732,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -904,18 +773,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_friction_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
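For orientation, here is a minimal usage sketch of the contact_friction_stress operator whose docstrings are reworked above; the result file name and the pin values are illustrative assumptions, not part of this diff.

from ansys.dpf import core as dpf

# Hypothetical MAPDL result file; any data source supported by the readers works.
data_sources = dpf.DataSources("model.rst")

op = dpf.operators.result.contact_friction_stress()
op.inputs.data_sources.connect(data_sources)
# Ask for an Elemental location and read the cyclic sector as stored (read_cyclic=1).
op.inputs.requested_location.connect("Elemental")
op.inputs.read_cyclic.connect(1)

# Evaluate the operator and retrieve the resulting fields container.
fields_container = op.outputs.fields_container()
print(fields_container)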
diff --git a/src/ansys/dpf/core/operators/result/contact_gap_distance.py b/src/ansys/dpf/core/operators/result/contact_gap_distance.py
index 99dfd522bc1..c480a8afe63 100644
--- a/src/ansys/dpf/core/operators/result/contact_gap_distance.py
+++ b/src/ansys/dpf/core/operators/result/contact_gap_distance.py
@@ -4,108 +4,59 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_gap_distance(Operator):
- """Read/compute element contact gap distance by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element contact gap distance by calling the readers defined
+ by the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location: Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +175,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact gap distance by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact gap distance by calling the readers defined
+by the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +194,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +286,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +302,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_GAP", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactGapDistance:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactGapDistance
+ inputs:
+ An instance of InputsContactGapDistance.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactGapDistance:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactGapDistance
+ outputs:
+ An instance of OutputsContactGapDistance.
"""
return super().outputs
@@ -523,28 +428,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -557,24 +449,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -587,15 +470,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -608,15 +491,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -629,15 +512,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,15 +533,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -671,15 +554,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -692,15 +575,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location: Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -713,18 +596,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -737,15 +617,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -758,17 +638,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -781,15 +659,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -802,15 +680,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -823,20 +701,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -849,21 +722,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -896,18 +763,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_gap_distance()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
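Likewise, a short sketch of how the contact_gap_distance pins documented above can be wired through the generated constructor; the element IDs and file path are assumptions for illustration only.

from ansys.dpf import core as dpf

data_sources = dpf.DataSources("model.rst")  # assumed result file

# Elemental scoping: the output fields will be scoped on these element IDs.
mesh_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.elemental)

# The generated __init__ accepts the documented pins as keyword arguments.
op = dpf.operators.result.contact_gap_distance(
    data_sources=data_sources,
    mesh_scoping=mesh_scoping,
)
fields_container = op.outputs.fields_container()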
diff --git a/src/ansys/dpf/core/operators/result/contact_penetration.py b/src/ansys/dpf/core/operators/result/contact_penetration.py
index 1edeed3f7c6..ff6336e7f5e 100644
--- a/src/ansys/dpf/core/operators/result/contact_penetration.py
+++ b/src/ansys/dpf/core/operators/result/contact_penetration.py
@@ -4,108 +4,59 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_penetration(Operator):
- """Read/compute element contact penetration by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element contact penetration by calling the readers defined
+ by the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location: Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +175,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact penetration by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact penetration by calling the readers defined
+by the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +194,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +286,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +302,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_PENE", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactPenetration:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactPenetration
+ inputs:
+ An instance of InputsContactPenetration.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactPenetration:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactPenetration
+ outputs:
+ An instance of OutputsContactPenetration.
"""
return super().outputs
@@ -519,28 +424,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,24 +445,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +466,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +487,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +508,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,15 +529,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -667,15 +550,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+        mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -688,15 +571,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -709,18 +592,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -733,15 +613,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -754,17 +634,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -777,15 +655,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -798,15 +676,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -819,20 +697,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -845,21 +718,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -892,18 +759,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_penetration()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
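The reworked docstrings above describe how the contact_penetration pins are driven from Python. As a minimal, hedged sketch of that usage (the result file path "model.rst" and the Nodal location are illustrative assumptions, not part of this change):

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("model.rst")  # hypothetical result file
>>> op = dpf.operators.result.contact_penetration()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")  # Nodal, Elemental or ElementalNodal
>>> fields_container = op.outputs.fields_container()  # evaluating the output runs the operator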
diff --git a/src/ansys/dpf/core/operators/result/contact_pressure.py b/src/ansys/dpf/core/operators/result/contact_pressure.py
index 9f2db8be176..ff66e1d885e 100644
--- a/src/ansys/dpf/core/operators/result/contact_pressure.py
+++ b/src/ansys/dpf/core/operators/result/contact_pressure.py
@@ -4,108 +4,59 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_pressure(Operator):
- """Read/compute element contact pressure by calling the readers defined
- by the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element contact pressure by calling the readers defined by
+ the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +175,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact pressure by calling the readers defined
- by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact pressure by calling the readers defined by
+the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +194,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+                    document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+                    document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +286,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +302,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_PRES", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactPressure:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactPressure
+ inputs:
+ An instance of InputsContactPressure.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactPressure:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactPressure
+ outputs:
+ An instance of OutputsContactPressure.
"""
return super().outputs
@@ -515,28 +420,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,24 +441,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -579,15 +462,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,15 +483,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -621,15 +504,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -642,15 +525,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -663,15 +546,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+        mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -684,15 +567,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -705,18 +588,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -729,15 +609,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -750,17 +630,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -773,15 +651,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -794,15 +672,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -815,20 +693,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -841,21 +714,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -886,18 +753,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_pressure()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
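The cyclic-expansion pins documented in the contact_pressure hunks above (read_cyclic, sectors_to_expand, phi) follow the same connection pattern. A sketch under the assumption of a cyclic result file at "cyclic_model.rst" and an arbitrary choice of sectors:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_pressure()
>>> op.inputs.data_sources.connect(dpf.DataSources("cyclic_model.rst"))  # hypothetical path
>>> op.inputs.read_cyclic.connect(2)  # 2 requests cyclic expansion
>>> op.inputs.sectors_to_expand.connect([0, 1, 2])  # sector numbering starts at 0
>>> fields_container = op.outputs.fields_container()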
diff --git a/src/ansys/dpf/core/operators/result/contact_sliding_distance.py b/src/ansys/dpf/core/operators/result/contact_sliding_distance.py
index 6f0f93fac92..a8113f583eb 100644
--- a/src/ansys/dpf/core/operators/result/contact_sliding_distance.py
+++ b/src/ansys/dpf/core/operators/result/contact_sliding_distance.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_sliding_distance(Operator):
- """Read/compute element contact sliding distance by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
+ r"""Read/compute element contact sliding distance by calling the readers
+ defined by the datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact sliding distance by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact sliding distance by calling the readers
+defined by the datasources. Regarding the requested location and the
+input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+                    document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+                    document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_SLIDE", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactSlidingDistance:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactSlidingDistance
+ inputs:
+ An instance of InputsContactSlidingDistance.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactSlidingDistance:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactSlidingDistance
+ outputs:
+ An instance of OutputsContactSlidingDistance.
"""
return super().outputs
@@ -531,28 +438,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,24 +459,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -595,15 +480,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -616,15 +501,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,15 +522,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -658,15 +543,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -679,15 +564,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -700,15 +585,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -721,18 +606,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -745,15 +627,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -766,17 +648,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -789,15 +669,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -810,15 +690,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -831,20 +711,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -857,21 +732,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -904,18 +773,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_sliding_distance()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
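A minimal usage sketch of the retyped contact_sliding_distance API above: the `inputs`/`outputs` properties keep the same pin names, only their annotations change. The result file path below is hypothetical.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_sliding_distance()
>>> # hypothetical result file; any supported result file path works here
>>> data_sources = dpf.DataSources(r"path/to/file.rst")
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")
>>> fields_container = op.outputs.fields_container()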
diff --git a/src/ansys/dpf/core/operators/result/contact_status.py b/src/ansys/dpf/core/operators/result/contact_status.py
index 60c40ad8ddb..40347c3ff61 100644
--- a/src/ansys/dpf/core/operators/result/contact_status.py
+++ b/src/ansys/dpf/core/operators/result/contact_status.py
@@ -4,108 +4,59 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_status(Operator):
- """Read/compute element contact status by calling the readers defined by
- the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element contact status by calling the readers defined by
+ the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +175,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact status by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact status by calling the readers defined by
+the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +194,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +286,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +302,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_STAT", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactStatus:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactStatus
+ inputs:
+ An instance of InputsContactStatus.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactStatus:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactStatus
+ outputs:
+ An instance of OutputsContactStatus.
"""
return super().outputs
@@ -511,28 +416,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -545,24 +437,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -575,15 +458,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +479,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,15 +500,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -638,15 +521,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -659,15 +542,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -680,15 +563,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -701,18 +584,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -725,15 +605,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -746,17 +626,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -769,15 +647,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -790,15 +668,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -811,20 +689,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -837,21 +710,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -882,18 +749,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_status()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
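A minimal sketch of the updated `default_config` usage for contact_status, assuming the standard generated constructor accepts a `config` keyword (the pin value shown is only illustrative):

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.contact_status.default_config()
>>> op = dpf.operators.result.contact_status(config=config)
>>> # read a single cyclic sector (read_cyclic pin, default is 1)
>>> op.inputs.read_cyclic.connect(1)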
diff --git a/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py b/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py
index 8f57effa53f..c5378569375 100644
--- a/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py
+++ b/src/ansys/dpf/core/operators/result/contact_surface_heat_flux.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_surface_heat_flux(Operator):
- """Read/compute element total heat flux at contact surface by calling the
- readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element total heat flux at contact surface by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element total heat flux at contact surface by calling the
- readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element total heat flux at contact surface by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_FLUX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactSurfaceHeatFlux:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactSurfaceHeatFlux
+ inputs:
+ An instance of InputsContactSurfaceHeatFlux.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactSurfaceHeatFlux:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactSurfaceHeatFlux
+ outputs:
+ An instance of OutputsContactSurfaceHeatFlux.
"""
return super().outputs
@@ -531,28 +438,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,24 +459,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -595,15 +480,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -616,15 +501,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,15 +522,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -658,15 +543,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -679,15 +564,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -700,15 +585,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location: Nodal, Elemental, or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -721,18 +606,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -745,15 +627,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -766,17 +648,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -789,15 +669,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -810,15 +690,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -831,20 +711,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -857,21 +732,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -904,18 +773,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_surface_heat_flux()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
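For illustration, a minimal usage sketch matching the refreshed contact_surface_heat_flux docstrings above; the result-file path and the requested location value are assumptions for the example, not taken from this diff:

>>> from ansys.dpf import core as dpf
>>> # Hypothetical result file containing contact results.
>>> data_sources = dpf.DataSources(r"D:\results\file.rst")
>>> op = dpf.operators.result.contact_surface_heat_flux()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")  # Nodal, Elemental or ElementalNodal
>>> fields_container = op.outputs.fields_container()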
diff --git a/src/ansys/dpf/core/operators/result/contact_total_stress.py b/src/ansys/dpf/core/operators/result/contact_total_stress.py
index 314141a746a..57dd3b1d787 100644
--- a/src/ansys/dpf/core/operators/result/contact_total_stress.py
+++ b/src/ansys/dpf/core/operators/result/contact_total_stress.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class contact_total_stress(Operator):
- """Read/compute element contact total stress (pressure plus friction) by
- calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element contact total stress (pressure plus friction) by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location: Nodal, Elemental, or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element contact total stress (pressure plus friction) by
- calling the readers defined by the datasources. Regarding
- the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element contact total stress (pressure plus friction) by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cyclic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location: Nodal, Elemental, or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECT_STOT", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsContactTotalStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsContactTotalStress
+ inputs:
+ An instance of InputsContactTotalStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsContactTotalStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsContactTotalStress
+ outputs:
+ An instance of OutputsContactTotalStress.
"""
return super().outputs
@@ -523,28 +430,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -557,24 +451,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -587,15 +472,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -608,15 +493,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -629,15 +514,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,15 +535,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -671,15 +556,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -692,15 +577,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location: Nodal, Elemental, or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -713,18 +598,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -737,15 +619,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -758,17 +640,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -781,15 +661,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -802,15 +682,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -823,20 +703,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -849,21 +724,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -896,18 +765,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.contact_total_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
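A minimal sketch of how the typed default_config and inputs/outputs shown above can be used with contact_total_stress; the result-file path is a placeholder assumption:

>>> from ansys.dpf import core as dpf
>>> # Start from the operator's default configuration, then adjust it if needed.
>>> config = dpf.operators.result.contact_total_stress.default_config()
>>> op = dpf.operators.result.contact_total_stress(config=config)
>>> op.inputs.data_sources.connect(dpf.DataSources(r"D:\results\file.rst"))
>>> op.inputs.bool_rotate_to_global.connect(True)  # rotation to global is already the default
>>> fields_container = op.outputs.fields_container()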
diff --git a/src/ansys/dpf/core/operators/result/coordinate_system.py b/src/ansys/dpf/core/operators/result/coordinate_system.py
index 05929c86344..94da8669e47 100644
--- a/src/ansys/dpf/core/operators/result/coordinate_system.py
+++ b/src/ansys/dpf/core/operators/result/coordinate_system.py
@@ -4,29 +4,31 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class coordinate_system(Operator):
- """Extracts the Rotation Matrix and Origin of a specific coordinate
- system.
+ r"""Extracts the Rotation Matrix and Origin of a specific coordinate system.
+
Parameters
----------
- cs_id : int
- streams_container : StreamsContainer, optional
- data_sources : DataSources
+ cs_id: int
+ streams_container: StreamsContainer, optional
+ data_sources: DataSources
Returns
-------
- field : Field
- The first 9 double are the rotation (3x3
- matrix) and the last 3 is the
- translation vector
+ field: Field
+ the first 9 doubles are the rotation (3x3 matrix) and the last 3 are the translation vector
Examples
--------
@@ -73,9 +75,9 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """Extracts the Rotation Matrix and Origin of a specific coordinate
- system."""
+ def _spec() -> Specification:
+ description = r"""Extracts the Rotation Matrix and Origin of a specific coordinate system.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,19 +85,19 @@ def _spec():
name="cs_id",
type_names=["int32"],
optional=False,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -103,16 +105,14 @@ def _spec():
name="field",
type_names=["field"],
optional=False,
- document="""The first 9 double are the rotation (3x3
- matrix) and the last 3 is the
- translation vector""",
+ document=r"""the first 9 doubles are the rotation (3x3 matrix) and the last 3 are the translation vector""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -121,29 +121,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="CS", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCoordinateSystem:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCoordinateSystem
+ inputs:
+ An instance of InputsCoordinateSystem.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCoordinateSystem:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCoordinateSystem
+ outputs:
+ An instance of OutputsCoordinateSystem.
"""
return super().outputs
@@ -176,12 +183,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def cs_id(self):
- """Allows to connect cs_id input to the operator.
+ def cs_id(self) -> Input:
+ r"""Allows to connect cs_id input to the operator.
- Parameters
- ----------
- my_cs_id : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -194,12 +202,13 @@ def cs_id(self):
return self._cs_id
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -212,12 +221,13 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -248,18 +258,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._field)
@property
- def field(self):
- """Allows to get field output of the operator
+ def field(self) -> Output:
+ r"""Allows to get field output of the operator
+
+ the first 9 doubles are the rotation (3x3 matrix) and the last 3 are the translation vector
Returns
- ----------
- my_field : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.coordinate_system()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field = op.outputs.field()
- """ # noqa: E501
+ """
return self._field
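A minimal sketch of extracting a coordinate system with the operator documented above; the coordinate system ID and file path are illustrative assumptions:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.coordinate_system()
>>> op.inputs.cs_id.connect(12)  # hypothetical coordinate system ID
>>> op.inputs.data_sources.connect(dpf.DataSources(r"D:\results\file.rst"))
>>> field = op.outputs.field()
>>> # Per the pin description above, the field holds 12 doubles:
>>> # the 3x3 rotation matrix (first 9) followed by the translation vector (last 3).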
diff --git a/src/ansys/dpf/core/operators/result/coordinates.py b/src/ansys/dpf/core/operators/result/coordinates.py
index 1b3fcb9adb3..931950fe66b 100644
--- a/src/ansys/dpf/core/operators/result/coordinates.py
+++ b/src/ansys/dpf/core/operators/result/coordinates.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class coordinates(Operator):
- """Read/compute Coordinates (LSDyna) by calling the readers defined by
- the datasources.
+ r"""Read/compute Coordinates (LSDyna) by calling the readers defined by the
+ datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute Coordinates (LSDyna) by calling the readers defined by
- the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Coordinates (LSDyna) by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="X", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCoordinates:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCoordinates
+ inputs:
+ An instance of InputsCoordinates.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCoordinates:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCoordinates
+ outputs:
+ An instance of OutputsCoordinates.
"""
return super().outputs
@@ -309,28 +264,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,24 +285,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,15 +306,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -394,15 +327,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -415,15 +348,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -436,15 +369,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -457,15 +390,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -496,18 +429,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.coordinates()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
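
The reformatted docstrings above describe the pin API of the coordinates operator. As a minimal sketch in the same doctest style used throughout these files, assuming a standard DataSources built from a result file (the path below is a placeholder), the required pin can be wired and the output read like this:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.coordinates()
>>> data_sources = dpf.DataSources(r"/path/to/result/file.rst")  # placeholder path
>>> op.inputs.data_sources.connect(data_sources)
>>> fields = op.outputs.fields_container()  # evaluates the operator and returns a FieldsContainer
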
diff --git a/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py b/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py
index c4f98d6a038..2c3920a2f3c 100644
--- a/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py
+++ b/src/ansys/dpf/core/operators/result/creep_strain_energy_density.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class creep_strain_energy_density(Operator):
- """Read/compute element nodal creep strain energy density by calling the
- readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element nodal creep strain energy density by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal creep strain energy density by calling the
- readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal creep strain energy density by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENL_CRWK", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCreepStrainEnergyDensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCreepStrainEnergyDensity
+ inputs:
+ An instance of InputsCreepStrainEnergyDensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCreepStrainEnergyDensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCreepStrainEnergyDensity
+ outputs:
+ An instance of OutputsCreepStrainEnergyDensity.
"""
return super().outputs
@@ -435,28 +360,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,24 +381,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,15 +402,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,15 +423,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,15 +444,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +465,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +486,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +507,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +528,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,20 +549,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -672,21 +570,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,18 +611,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.creep_strain_energy_density()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
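
For the creep strain energy density operator documented above, a hedged sketch of requesting a specific result location, again in doctest style and with a placeholder result path:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.creep_strain_energy_density()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/result/file.rst"))  # placeholder path
>>> op.inputs.requested_location.connect("Nodal")  # or "Elemental" / "ElementalNodal"
>>> fields = op.outputs.fields_container()
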
diff --git a/src/ansys/dpf/core/operators/result/current_density.py b/src/ansys/dpf/core/operators/result/current_density.py
index 5c530bbb01a..7b9d452ff88 100644
--- a/src/ansys/dpf/core/operators/result/current_density.py
+++ b/src/ansys/dpf/core/operators/result/current_density.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class current_density(Operator):
- """Read/compute Current Density by calling the readers defined by the
+ r"""Read/compute Current Density by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute Current Density by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Current Density by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ECD", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCurrentDensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCurrentDensity
+ inputs:
+ An instance of InputsCurrentDensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCurrentDensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCurrentDensity
+ outputs:
+ An instance of OutputsCurrentDensity.
"""
return super().outputs
@@ -311,28 +266,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -345,24 +287,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -375,15 +308,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -396,15 +329,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -417,15 +350,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -438,15 +371,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -459,15 +392,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -498,18 +431,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.current_density()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
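
The time_scoping pin described above accepts time/freq set IDs as integers. A minimal sketch for the current density operator, using a placeholder path and assuming the plain-list form of connect supported elsewhere in PyDPF examples:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.current_density()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/result/file.rst"))  # placeholder path
>>> op.inputs.time_scoping.connect([1, 2])  # time/freq set ids passed as ints
>>> fields = op.outputs.fields_container()
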
diff --git a/src/ansys/dpf/core/operators/result/custom.py b/src/ansys/dpf/core/operators/result/custom.py
index 8648059fa73..26773c3a05b 100644
--- a/src/ansys/dpf/core/operators/result/custom.py
+++ b/src/ansys/dpf/core/operators/result/custom.py
@@ -4,69 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class custom(Operator):
- """Read/compute user defined result by calling the readers defined by the
+ r"""Read/compute user defined result by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- result_name :
- Name of the result that must be extracted
- from the file
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ result_name:
+ Name of the result that must be extracted from the file
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -143,9 +118,10 @@ def __init__(
self.inputs.result_name.connect(result_name)
@staticmethod
- def _spec():
- description = """Read/compute user defined result by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute user defined result by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -160,79 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
60: PinSpecification(
name="result_name",
type_names=["any"],
optional=False,
- document="""Name of the result that must be extracted
- from the file""",
+ document=r"""Name of the result that must be extracted from the file""",
),
},
map_output_pin_spec={
@@ -240,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -256,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="custom", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCustom:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCustom
+ inputs:
+ An instance of InputsCustom.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCustom:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCustom
+ outputs:
+ An instance of OutputsCustom.
"""
return super().outputs
@@ -329,28 +282,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._result_name)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -363,24 +303,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -393,15 +324,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -414,15 +345,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -435,15 +366,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -456,15 +387,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -477,15 +408,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -498,15 +429,15 @@ def mesh(self):
return self._mesh
@property
- def result_name(self):
- """Allows to connect result_name input to the operator.
+ def result_name(self) -> Input:
+ r"""Allows to connect result_name input to the operator.
- Name of the result that must be extracted
- from the file
+ Name of the result that must be extracted from the file
- Parameters
- ----------
- my_result_name :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -537,18 +468,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.custom()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
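
The custom operator above requires a result_name pin naming the result to extract from the file. A sketch in the same doctest style; both the path and the result name below are hypothetical placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.custom()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"/path/to/result/file.rst"))  # placeholder path
>>> op.inputs.result_name.connect("MY_RESULT")  # hypothetical result name present in the file
>>> fields = op.outputs.fields_container()
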
diff --git a/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py b/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
index 0141a2605e6..bbb6edddbd2 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_analytic_seqv_max.py
@@ -4,32 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_analytic_seqv_max(Operator):
- """Compute the maximum of the Von Mises equivalent stress that can be
+ r"""Compute the maximum of the Von Mises equivalent stress that can be
expected on 360 degrees
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer
- Field container with the base and duplicate
- sectors
- bool_rotate_to_global : bool, optional
- Default is true
- cyclic_support : CyclicSupport
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer
+ field container with the base and duplicate sectors
+ bool_rotate_to_global: bool, optional
+ default is true
+ cyclic_support: CyclicSupport
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
+ fields_container: FieldsContainer
+ FieldsContainer filled in
Examples
--------
@@ -90,9 +94,10 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """Compute the maximum of the Von Mises equivalent stress that can be
- expected on 360 degrees"""
+ def _spec() -> Specification:
+ description = r"""Compute the maximum of the Von Mises equivalent stress that can be
+expected on 360 degrees
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -100,32 +105,31 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field container with the base and duplicate
- sectors""",
+ document=r"""field container with the base and duplicate sectors""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -133,14 +137,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -149,31 +153,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="cyclic_analytic_stress_eqv_max", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicAnalyticSeqvMax:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicAnalyticSeqvMax
+ inputs:
+ An instance of InputsCyclicAnalyticSeqvMax.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicAnalyticSeqvMax:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicAnalyticSeqvMax
+ outputs:
+ An instance of OutputsCyclicAnalyticSeqvMax.
"""
return super().outputs
@@ -222,12 +233,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -240,12 +252,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -258,15 +271,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field container with the base and duplicate
- sectors
+ field container with the base and duplicate sectors
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -279,14 +292,15 @@ def fields_container(self):
return self._fields_container
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,12 +313,13 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -337,18 +352,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_analytic_seqv_max()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
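The `default_config` signature above now advertises that it returns a `Config` and accepts any server type. A short sketch of the documented workflow, assuming a running DPF server:

    from ansys.dpf import core as dpf

    # Fetch the default configuration, adjust it if needed, then build the operator with it.
    config = dpf.operators.result.cyclic_analytic_seqv_max.default_config()
    op = dpf.operators.result.cyclic_analytic_seqv_max(config=config)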
diff --git a/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py b/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
index f9fde1991f4..30798c7a4cd 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_analytic_usum_max.py
@@ -4,32 +4,36 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_analytic_usum_max(Operator):
- """Compute the maximum of the total deformation that can be expected on
- 360 degrees
+ r"""Compute the maximum of the total deformation that can be expected on 360
+ degrees
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer
- Field container with the base and duplicate
- sectors
- bool_rotate_to_global : bool, optional
- Default is true
- cyclic_support : CyclicSupport
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer
+ field container with the base and duplicate sectors
+ bool_rotate_to_global: bool, optional
+ default is true
+ cyclic_support: CyclicSupport
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
+ fields_container: FieldsContainer
+ FieldsContainer filled in
Examples
--------
@@ -88,9 +92,10 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """Compute the maximum of the total deformation that can be expected on
- 360 degrees"""
+ def _spec() -> Specification:
+ description = r"""Compute the maximum of the total deformation that can be expected on 360
+degrees
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -98,32 +103,31 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field container with the base and duplicate
- sectors""",
+ document=r"""field container with the base and duplicate sectors""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -131,14 +135,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -147,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cyclic_analytic_usum_max", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicAnalyticUsumMax:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicAnalyticUsumMax
+ inputs:
+ An instance of InputsCyclicAnalyticUsumMax.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicAnalyticUsumMax:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicAnalyticUsumMax
+ outputs:
+ An instance of OutputsCyclicAnalyticUsumMax.
"""
return super().outputs
@@ -218,12 +229,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -236,12 +248,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -254,15 +267,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field container with the base and duplicate
- sectors
+ field container with the base and duplicate sectors
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -275,14 +288,15 @@ def fields_container(self):
return self._fields_container
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -295,12 +309,13 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -333,18 +348,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_analytic_usum_max()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/cyclic_equivalent_mass.py b/src/ansys/dpf/core/operators/result/cyclic_equivalent_mass.py
index 4a11de8e57b..22835a9b7cd 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_equivalent_mass.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_equivalent_mass.py
@@ -4,59 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_equivalent_mass(Operator):
- """This operator is deprecated: use the operator equivalent_mass with the
+ r"""This operator is deprecated: use the operator equivalent_mass with the
read_cyclic pin instead. Read equivalent mass from an rst file and
expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- all_dofs : bool, optional
- Default is false.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ all_dofs: bool, optional
+ default is false.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +169,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator equivalent_mass with the
- read_cyclic pin instead. Read equivalent mass from an rst
- file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator equivalent_mass with the
+read_cyclic pin instead. Read equivalent mass from an rst file and
+expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,93 +181,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""Default is false.""",
+ document=r"""default is false.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -277,20 +267,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -299,31 +289,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="mapdl::rst::equivalent_mass_cyclic", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicEquivalentMass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicEquivalentMass
+ inputs:
+ An instance of InputsCyclicEquivalentMass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicEquivalentMass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicEquivalentMass
+ outputs:
+ An instance of OutputsCyclicEquivalentMass.
"""
return super().outputs
@@ -422,12 +419,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,12 +438,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -458,15 +457,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -479,14 +478,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,14 +499,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -519,15 +520,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -540,14 +541,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- Default is false.
+ default is false.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -560,14 +562,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -580,14 +583,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,18 +604,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -624,14 +625,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -644,12 +646,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -662,16 +665,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -684,14 +686,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -729,35 +732,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_equivalent_mass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_equivalent_mass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
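As the regenerated docstring states, `cyclic_equivalent_mass` is deprecated in favour of `equivalent_mass` driven through its `read_cyclic` pin. A hedged sketch of that migration, assuming `equivalent_mass` exposes the usual `data_sources` and `read_cyclic` pins and that the placeholder path points at a cyclic result file:

    from ansys.dpf import core as dpf

    ds = dpf.DataSources(r"/path/to/cyclic_model.rst")  # placeholder path
    op = dpf.operators.result.equivalent_mass()
    op.inputs.data_sources.connect(ds)
    op.inputs.read_cyclic.connect(2)  # 2: perform the cyclic expansion (see the pin documentation above)
    expanded = op.outputs.fields_container()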
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py
index ce21537ff2d..18bb40822b3 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_acceleration.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_acceleration(Operator):
- """This operator is deprecated: use the operator acceleration with the
- read_cyclic pin instead. Read acceleration from an rst file and
- expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator acceleration with the
+ read_cyclic pin instead. Read acceleration from an rst file and expand
+ it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator acceleration with the
- read_cyclic pin instead. Read acceleration from an rst
- file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator acceleration with the
+read_cyclic pin instead. Read acceleration from an rst file and expand
+it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::A_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedAcceleration:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedAcceleration
+ inputs:
+ An instance of InputsCyclicExpandedAcceleration.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedAcceleration:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedAcceleration
+ outputs:
+ An instance of OutputsCyclicExpandedAcceleration.
"""
return super().outputs
@@ -426,12 +419,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,12 +438,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -462,15 +457,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -483,14 +478,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -503,14 +499,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,14 +520,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,17 +541,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -566,14 +562,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -586,14 +583,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,18 +604,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -630,14 +625,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,12 +646,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -668,16 +665,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -690,14 +686,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -735,35 +732,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_acceleration()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_acceleration()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
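The `sectors_to_expand` documentation above notes that sector numbering starts at 0 and that multistage models pass a scopings container carrying a 'stage' label. A minimal single-stage sketch for the acceleration operator, with a placeholder data source:

    from ansys.dpf import core as dpf

    ds = dpf.DataSources(r"/path/to/cyclic_model.rst")  # placeholder path
    op = dpf.operators.result.cyclic_expanded_acceleration()
    op.inputs.data_sources.connect(ds)
    op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # expand the first three sectors
    fields = op.outputs.fields_container()  # FieldsContainer filled in
    meshes = op.outputs.expanded_meshes()   # expanded MeshesContainer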
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py
index bad20a05822..111ed09f83d 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_displacement.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_displacement(Operator):
- """This operator is deprecated: use the operator displacements with the
- read_cyclic pin instead. Read displacements from an rst file and
- expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator displacements with the
+ read_cyclic pin instead. Read displacements from an rst file and expand
+ it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator displacements with the
- read_cyclic pin instead. Read displacements from an rst
- file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator displacements with the
+read_cyclic pin instead. Read displacements from an rst file and expand
+it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::U_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedDisplacement:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedDisplacement
+ inputs:
+ An instance of InputsCyclicExpandedDisplacement.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedDisplacement:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedDisplacement
+ outputs:
+ An instance of OutputsCyclicExpandedDisplacement.
"""
return super().outputs
@@ -426,12 +419,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,12 +438,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -462,15 +457,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -483,14 +478,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -503,14 +499,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,14 +520,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,17 +541,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -566,14 +562,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -586,14 +583,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,18 +604,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -630,14 +625,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,12 +646,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -668,16 +665,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -690,14 +686,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -735,35 +732,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_displacement()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_displacement()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
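A minimal usage sketch for the deprecated cyclic_expanded_displacement operator shown above, following the connect/evaluate pattern its docstrings describe; the result-file path is a placeholder and a running DPF server is assumed, so this is illustrative only.

    from ansys.dpf import core as dpf

    # Placeholder path to a cyclic-symmetry result file (assumption for illustration).
    ds = dpf.DataSources("path/to/cyclic_model.rst")

    op = dpf.operators.result.cyclic_expanded_displacement()
    op.inputs.data_sources.connect(ds)
    op.inputs.read_cyclic.connect(2)  # 2: perform the cyclic expansion (see pin 14 above)

    # Evaluating the outputs runs the operator.
    fields = op.outputs.fields_container()
    meshes = op.outputs.expanded_meshes()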
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py
index 0de5ac903c8..b59b222d991 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_el_strain.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_el_strain(Operator):
- """This operator is deprecated: use the operator mapdl::rst::EPEL with
- the read_cyclic pin instead. Read mapdl::rst::EPEL from an rst
- file and expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::EPEL with the
+ read_cyclic pin instead. Read mapdl::rst::EPEL from an rst file and
+ expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Phi angle (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ phi angle (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::EPEL with
- the read_cyclic pin instead. Read mapdl::rst::EPEL from an
- rst file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::EPEL with the
+read_cyclic pin instead. Read mapdl::rst::EPEL from an rst file and
+expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Phi angle (default value 0.0)""",
+ document=r"""phi angle (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::EPEL_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedElStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedElStrain
+ inputs:
+ An instance of InputsCyclicExpandedElStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedElStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedElStrain
+ outputs:
+ An instance of OutputsCyclicExpandedElStrain.
"""
return super().outputs
@@ -424,12 +417,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -442,12 +436,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +455,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,14 +476,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,14 +497,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -521,14 +518,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,17 +539,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,14 +560,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -584,14 +581,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,18 +602,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -628,14 +623,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -648,12 +644,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -666,16 +663,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -688,14 +684,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Phi angle (default value 0.0)
+ phi angle (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -733,35 +730,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_el_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_el_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
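A similar sketch for cyclic_expanded_el_strain, exercising the sectors_to_expand and requested_location pins documented above; the file path, sector ids, and location string are assumptions chosen for illustration.

    from ansys.dpf import core as dpf

    op = dpf.operators.result.cyclic_expanded_el_strain()
    op.inputs.data_sources.connect(dpf.DataSources("path/to/cyclic_model.rst"))
    op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # sector numbering starts at 0
    op.inputs.requested_location.connect("Nodal")  # location needed in output

    strain_fc = op.outputs.fields_container()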
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_enf.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_enf.py
index 64b09916478..ab7d3a48ab0 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_enf.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_enf.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_enf(Operator):
- """This operator is deprecated: use the operator ENF with the read_cyclic
+ r"""This operator is deprecated: use the operator ENF with the read_cyclic
pin instead. Read ENF from an rst file and expand it with cyclic
symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Phi angle (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ phi angle (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator ENF with the read_cyclic
- pin instead. Read ENF from an rst file and expand it with
- cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator ENF with the read_cyclic
+pin instead. Read ENF from an rst file and expand it with cyclic
+symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Phi angle (default value 0.0)""",
+ document=r"""phi angle (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::ENF_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedEnf:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedEnf
+ inputs:
+ An instance of InputsCyclicExpandedEnf.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedEnf:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedEnf
+ outputs:
+ An instance of OutputsCyclicExpandedEnf.
"""
return super().outputs
@@ -412,12 +405,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,12 +424,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +443,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,14 +464,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -489,14 +485,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -509,14 +506,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -529,17 +527,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -552,14 +548,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -572,14 +569,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -592,18 +590,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -616,14 +611,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -636,12 +632,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -654,16 +651,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -676,14 +672,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Phi angle (default value 0.0)
+ phi angle (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,35 +716,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_enf()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_enf()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
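A short sketch of the read_cyclic values documented for cyclic_expanded_enf above (0 ignore, 1 read sector, 2 expand, 3 expand and merge stages); the multistage result path is a placeholder and the pin values are only examples.

    from ansys.dpf import core as dpf

    op = dpf.operators.result.cyclic_expanded_enf()
    op.inputs.data_sources.connect(dpf.DataSources("path/to/multistage_model.rst"))
    op.inputs.read_cyclic.connect(3)              # 3: expand and merge the stages
    op.inputs.bool_rotate_to_global.connect(True)  # default is true per pin 5

    enf_fc = op.outputs.fields_container()
    expanded = op.outputs.expanded_meshes()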
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
index b90ce6a4899..14d140ffc3b 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_heat_flux.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_heat_flux(Operator):
- """This operator is deprecated: use the operator mapdl::rst::TF with the
- read_cyclic pin instead. Read mapdl::rst::TF from an rst file and
- expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::TF with the
+ read_cyclic pin instead. Read mapdl::rst::TF from an rst file and expand
+ it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Phi angle (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ phi angle (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::TF with the
- read_cyclic pin instead. Read mapdl::rst::TF from an rst
- file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::TF with the
+read_cyclic pin instead. Read mapdl::rst::TF from an rst file and expand
+it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Phi angle (default value 0.0)""",
+ document=r"""phi angle (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::TF_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedHeatFlux:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedHeatFlux
+ inputs:
+ An instance of InputsCyclicExpandedHeatFlux.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedHeatFlux:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedHeatFlux
+ outputs:
+ An instance of OutputsCyclicExpandedHeatFlux.
"""
return super().outputs
@@ -424,12 +417,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -442,12 +436,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +455,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,14 +476,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,14 +497,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -521,14 +518,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,17 +539,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,14 +560,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -584,14 +581,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,18 +602,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -628,14 +623,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -648,12 +644,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -666,16 +663,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -688,14 +684,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Phi angle (default value 0.0)
+ phi angle (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -733,35 +730,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_heat_flux()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_heat_flux()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
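
Note on usage (not part of the patch): as a quick sanity check of the regenerated docstrings above, here is a minimal sketch of driving the heat-flux operator the way its Examples sections suggest, written in the same doctest style. The result file name is hypothetical, and the pin values follow the read_cyclic documentation in this file (the operator itself is flagged as deprecated in favor of the plain heat-flux result operator with the read_cyclic pin).

>>> from ansys.dpf import core as dpf
>>> # Hypothetical cyclic-symmetry result file; replace with a real RST path.
>>> data_sources = dpf.DataSources("cyclic_model.rst")
>>> op = dpf.operators.result.cyclic_expanded_heat_flux()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion (pin 14 above)
>>> fields = op.outputs.fields_container()
>>> meshes = op.outputs.expanded_meshes()
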
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_stress.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_stress.py
index 6e7e4f0ddef..86b768efe16 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_stress.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_stress.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_stress(Operator):
- """This operator is deprecated: use the operator mapdl::rst::S with the
- read_cyclic pin instead. Read mapdl::rst::S from an rst file and
- expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::S with the
+ read_cyclic pin instead. Read mapdl::rst::S from an rst file and expand
+ it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Phi angle (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ phi angle (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::S with the
- read_cyclic pin instead. Read mapdl::rst::S from an rst
- file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::S with the
+read_cyclic pin instead. Read mapdl::rst::S from an rst file and expand
+it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Phi angle (default value 0.0)""",
+ document=r"""phi angle (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::S_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedStress:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedStress
+ inputs:
+ An instance of InputsCyclicExpandedStress.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedStress:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedStress
+ outputs:
+ An instance of OutputsCyclicExpandedStress.
"""
return super().outputs
@@ -422,12 +415,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,12 +434,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -458,15 +453,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -479,14 +474,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,14 +495,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -519,14 +516,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -539,17 +537,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,14 +558,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -582,14 +579,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -602,18 +600,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,14 +621,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,12 +642,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -664,16 +661,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -686,14 +682,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Phi angle (default value 0.0)
+ phi angle (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -731,35 +728,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_stress()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
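
For the stress variant above, the regenerated description recommends the non-deprecated path: the plain stress result operator driven through its read_cyclic pin. A hedged sketch of that replacement pattern follows, again in doctest style; it assumes a server/operator version where result.stress exposes read_cyclic, and the file path is illustrative only.

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("cyclic_model.rst")  # hypothetical result file
>>> op = dpf.operators.result.stress()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.read_cyclic.connect(3)  # 3: expand and merge stages (multistage models)
>>> stresses = op.outputs.fields_container()
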
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
index 6d4540964b8..75493a3887a 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_temperature.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_temperature(Operator):
- """This operator is deprecated: use the operator temperature with the
- read_cyclic pin instead. Read temperature from an rst file and
- expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator temperature with the
+ read_cyclic pin instead. Read temperature from an rst file and expand it
+ with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator temperature with the
- read_cyclic pin instead. Read temperature from an rst file
- and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator temperature with the
+read_cyclic pin instead. Read temperature from an rst file and expand it
+with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::TEMP_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedTemperature:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedTemperature
+ inputs:
+ An instance of InputsCyclicExpandedTemperature.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedTemperature:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedTemperature
+ outputs:
+ An instance of OutputsCyclicExpandedTemperature.
"""
return super().outputs
@@ -424,12 +417,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -442,12 +436,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +455,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,14 +476,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,14 +497,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -521,14 +518,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,17 +539,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,14 +560,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -584,14 +581,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,18 +602,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -628,14 +623,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -648,12 +644,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -666,16 +663,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -688,14 +684,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -733,35 +730,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_temperature()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_temperature()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
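
The temperature operator above also documents the sectors_to_expand and requested_location pins; a short illustrative sketch of those two inputs follows. The sector list and file name are assumptions, and passing a plain Python list relies on the pin accepting a vector as documented for pin 18.

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("cyclic_model.rst")  # hypothetical path
>>> op = dpf.operators.result.cyclic_expanded_temperature()
>>> op.inputs.data_sources.connect(ds)
>>> op.inputs.sectors_to_expand.connect([0, 1, 2])  # expand the first three sectors
>>> op.inputs.requested_location.connect(dpf.locations.nodal)
>>> temperatures = op.outputs.fields_container()
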
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py b/src/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py
index 1974da96a8e..bc67e3aeac2 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expanded_velocity.py
@@ -4,61 +4,56 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expanded_velocity(Operator):
- """This operator is deprecated: use the operator velocity with the
- read_cyclic pin instead. Read velocity from an rst file and expand
- it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator velocity with the
+ read_cyclic pin instead. Read velocity from an rst file and expand it
+ with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- Default is true
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- requested_location : str, optional
- Location needed in output
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ default is true
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ requested_location: str, optional
+ location needed in output
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -172,10 +167,11 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator velocity with the
- read_cyclic pin instead. Read velocity from an rst file
- and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator velocity with the
+read_cyclic pin instead. Read velocity from an rst file and expand it
+with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -183,95 +179,85 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Location needed in output""",
+ document=r"""location needed in output""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -279,20 +265,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -301,29 +287,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::V_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpandedVelocity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpandedVelocity
+ inputs:
+ An instance of InputsCyclicExpandedVelocity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpandedVelocity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpandedVelocity
+ outputs:
+ An instance of OutputsCyclicExpandedVelocity.
"""
return super().outputs
@@ -422,12 +415,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,12 +434,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -458,15 +453,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -479,14 +474,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,14 +495,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -519,14 +516,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -539,17 +537,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,14 +558,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -582,14 +579,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Location needed in output
+ location needed in output
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -602,18 +600,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -626,14 +621,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,12 +642,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -664,16 +661,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -686,14 +682,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -731,35 +728,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_velocity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expanded_velocity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
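For reviewers who want to see the revised accessor pattern in action, here is a minimal wiring sketch for cyclic_expanded_velocity. The example-file helper and the pin values are illustrative assumptions, not part of this change; any cyclic result set containing velocity would do.

>>> from ansys.dpf import core as dpf
>>> from ansys.dpf.core import examples
>>> # Placeholder data: assumes the bundled simple cyclic example file is available.
>>> data_sources = dpf.DataSources(examples.find_simple_cyclic())
>>> op = dpf.operators.result.cyclic_expanded_velocity()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")
>>> # Outputs are read through op.outputs, matching the updated doctest comments.
>>> fields = op.outputs.fields_container()
>>> meshes = op.outputs.expanded_meshes()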
diff --git a/src/ansys/dpf/core/operators/result/cyclic_expansion.py b/src/ansys/dpf/core/operators/result/cyclic_expansion.py
index c3c8c1ea6ca..0105e74374c 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_expansion.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_expansion.py
@@ -4,43 +4,45 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_expansion(Operator):
- """Expand cyclic results from a fieldsContainer for given sets, sectors
- and scoping (optionals).
+ r"""Expand cyclic results from a fieldsContainer for given sets, sectors and
+ scoping (optionals).
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer
- Field container with the base and duplicate
- sectors
- harmonic_index : int, optional
- bool_rotate_to_global : bool, optional
- Default is true
- map_size_scoping_out : optional
- Map provider by scoping adapter
- normalization_factor : float, optional
- merge_stages : bool, optional
- cyclic_support : CyclicSupport
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
- phi : float, optional
- Angle phi in degrees (default value 0.0)
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer
+ field container with the base and duplicate sectors
+ harmonic_index: int, optional
+ bool_rotate_to_global: bool, optional
+ default is true
+ map_size_scoping_out: optional
+ map provider by scoping adapter
+ normalization_factor: float, optional
+ merge_stages: bool, optional
+ cyclic_support: CyclicSupport
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
+ phi: float, optional
+ angle phi in degrees (default value 0.0)
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
+ fields_container: FieldsContainer
+ FieldsContainer filled in
Examples
--------
@@ -135,9 +137,10 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """Expand cyclic results from a fieldsContainer for given sets, sectors
- and scoping (optionals)."""
+ def _spec() -> Specification:
+ description = r"""Expand cyclic results from a fieldsContainer for given sets, sectors and
+scoping (optionals).
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -145,70 +148,67 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Field container with the base and duplicate
- sectors""",
+ document=r"""field container with the base and duplicate sectors""",
),
3: PinSpecification(
name="harmonic_index",
type_names=["int32"],
optional=True,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""Default is true""",
+ document=r"""default is true""",
),
6: PinSpecification(
name="map_size_scoping_out",
type_names=["umap"],
optional=True,
- document="""Map provider by scoping adapter""",
+ document=r"""map provider by scoping adapter""",
),
7: PinSpecification(
name="normalization_factor",
type_names=["double"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="merge_stages",
type_names=["bool"],
optional=True,
- document="""""",
+ document=r"""""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=False,
- document="""""",
+ document=r"""""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0)""",
+ document=r"""angle phi in degrees (default value 0.0)""",
),
},
map_output_pin_spec={
@@ -216,14 +216,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -232,29 +232,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="cyclic_expansion", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicExpansion:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicExpansion
+ inputs:
+ An instance of InputsCyclicExpansion.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicExpansion:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicExpansion
+ outputs:
+ An instance of OutputsCyclicExpansion.
"""
return super().outputs
@@ -325,12 +332,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -343,12 +351,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -361,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Field container with the base and duplicate
- sectors
+ field container with the base and duplicate sectors
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -382,12 +391,13 @@ def fields_container(self):
return self._fields_container
@property
- def harmonic_index(self):
- """Allows to connect harmonic_index input to the operator.
+ def harmonic_index(self) -> Input:
+ r"""Allows to connect harmonic_index input to the operator.
- Parameters
- ----------
- my_harmonic_index : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -400,14 +410,15 @@ def harmonic_index(self):
return self._harmonic_index
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- Default is true
+ default is true
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -420,14 +431,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def map_size_scoping_out(self):
- """Allows to connect map_size_scoping_out input to the operator.
+ def map_size_scoping_out(self) -> Input:
+ r"""Allows to connect map_size_scoping_out input to the operator.
- Map provider by scoping adapter
+ map provider by scoping adapter
- Parameters
- ----------
- my_map_size_scoping_out :
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -440,12 +452,13 @@ def map_size_scoping_out(self):
return self._map_size_scoping_out
@property
- def normalization_factor(self):
- """Allows to connect normalization_factor input to the operator.
+ def normalization_factor(self) -> Input:
+ r"""Allows to connect normalization_factor input to the operator.
- Parameters
- ----------
- my_normalization_factor : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -458,12 +471,13 @@ def normalization_factor(self):
return self._normalization_factor
@property
- def merge_stages(self):
- """Allows to connect merge_stages input to the operator.
+ def merge_stages(self) -> Input:
+ r"""Allows to connect merge_stages input to the operator.
- Parameters
- ----------
- my_merge_stages : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -476,12 +490,13 @@ def merge_stages(self):
return self._merge_stages
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -494,16 +509,15 @@ def cyclic_support(self):
return self._cyclic_support
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -516,14 +530,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0)
+ angle phi in degrees (default value 0.0)
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,18 +569,21 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expansion()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
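A minimal sketch of the cyclic_expansion pins documented above; harmonic_fields and cyclic_support are hypothetical variables standing in for a previously read fields container (base plus duplicate sectors) and its cyclic support.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_expansion()
>>> op.inputs.fields_container.connect(harmonic_fields)  # base and duplicate sector fields
>>> op.inputs.cyclic_support.connect(cyclic_support)
>>> op.inputs.sectors_to_expand.connect(dpf.Scoping(ids=[0, 1, 2]))  # sector numbering starts at 0
>>> op.inputs.phi.connect(0.0)  # angle in degrees, default 0.0
>>> expanded = op.outputs.fields_container()  # FieldsContainer filled in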
diff --git a/src/ansys/dpf/core/operators/result/cyclic_kinetic_energy.py b/src/ansys/dpf/core/operators/result/cyclic_kinetic_energy.py
index f969e09718f..f0f01511659 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_kinetic_energy.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_kinetic_energy.py
@@ -4,54 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_kinetic_energy(Operator):
- """This operator is deprecated: use the operator mapdl::rst::ENG_KE with
- the read_cyclic pin instead. Compute mapdl::rst::ENG_KE from an
- rst file and expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::ENG_KE with
+ the read_cyclic pin instead. Compute mapdl::rst::ENG_KE from an rst file
+ and expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -147,10 +143,11 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::ENG_KE with
- the read_cyclic pin instead. Compute mapdl::rst::ENG_KE
- from an rst file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::ENG_KE with
+the read_cyclic pin instead. Compute mapdl::rst::ENG_KE from an rst file
+and expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -158,76 +155,67 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -235,20 +223,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -257,29 +245,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::ENG_KE_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicKineticEnergy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicKineticEnergy
+ inputs:
+ An instance of InputsCyclicKineticEnergy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicKineticEnergy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicKineticEnergy
+ outputs:
+ An instance of OutputsCyclicKineticEnergy.
"""
return super().outputs
@@ -360,12 +355,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -378,12 +374,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -396,15 +393,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -417,14 +414,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -437,14 +435,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -457,15 +456,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -478,17 +477,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,14 +498,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -521,18 +519,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -545,14 +540,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,12 +561,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -608,35 +605,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_kinetic_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_kinetic_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
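Because the docstring flags this operator as deprecated in favour of mapdl::rst::ENG_KE with the read_cyclic pin, a hedged sketch of that replacement follows. It assumes the non-cyclic kinetic_energy class exposes a read_cyclic input with the usual 0/1/2/3 semantics; the result file path is a placeholder.

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("path/to/file.rst")  # placeholder path
>>> # Assumed replacement: the plain kinetic energy operator driven by read_cyclic.
>>> op = dpf.operators.result.kinetic_energy()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion (3 would also merge stages)
>>> fields = op.outputs.fields_container()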
diff --git a/src/ansys/dpf/core/operators/result/cyclic_nmisc.py b/src/ansys/dpf/core/operators/result/cyclic_nmisc.py
index 4ce658597cb..7386d3719ce 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_nmisc.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_nmisc.py
@@ -4,54 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_nmisc(Operator):
- """This operator is deprecated: use the operator mapdl::rst::NMISC with
- the read_cyclic pin instead. Compute mapdl::rst::NMISC from an rst
- file and expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::NMISC with the
+ read_cyclic pin instead. Compute mapdl::rst::NMISC from an rst file and
+ expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -147,10 +143,11 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::NMISC with
- the read_cyclic pin instead. Compute mapdl::rst::NMISC
- from an rst file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::NMISC with the
+read_cyclic pin instead. Compute mapdl::rst::NMISC from an rst file and
+expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -158,76 +155,67 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -235,20 +223,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -257,29 +245,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::NMISC_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicNmisc:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicNmisc
+ inputs:
+ An instance of InputsCyclicNmisc.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicNmisc:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicNmisc
+ outputs:
+ An instance of OutputsCyclicNmisc.
"""
return super().outputs
@@ -346,12 +341,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -364,12 +360,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -382,15 +379,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,14 +400,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,14 +421,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -443,15 +442,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -464,17 +463,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -487,14 +484,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +505,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -531,14 +526,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -551,12 +547,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -590,35 +587,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_nmisc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_nmisc()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
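The default_config signature change above is easiest to see in use; this sketch simply retrieves the default Config and passes it back to the operator constructor. The result file path is a placeholder.

>>> from ansys.dpf import core as dpf
>>> # A Config equivalent to the operator defaults; it can be adjusted before use.
>>> config = dpf.operators.result.cyclic_nmisc.default_config()
>>> op = dpf.operators.result.cyclic_nmisc(config=config)
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/file.rst"))  # placeholder path
>>> fields = op.outputs.fields_container()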
diff --git a/src/ansys/dpf/core/operators/result/cyclic_strain_energy.py b/src/ansys/dpf/core/operators/result/cyclic_strain_energy.py
index 80e58c119a1..dbb598513e9 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_strain_energy.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_strain_energy.py
@@ -4,54 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_strain_energy(Operator):
- """This operator is deprecated: use the operator mapdl::rst::ENG_SE with
- the read_cyclic pin instead. Compute mapdl::rst::ENG_SE from an
- rst file and expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::ENG_SE with
+ the read_cyclic pin instead. Compute mapdl::rst::ENG_SE from an rst file
+ and expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -147,10 +143,11 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::ENG_SE with
- the read_cyclic pin instead. Compute mapdl::rst::ENG_SE
- from an rst file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::ENG_SE with
+the read_cyclic pin instead. Compute mapdl::rst::ENG_SE from an rst file
+and expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -158,76 +155,67 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -235,20 +223,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -257,29 +245,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::ENG_SE_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicStrainEnergy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicStrainEnergy
+ inputs:
+ An instance of InputsCyclicStrainEnergy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicStrainEnergy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicStrainEnergy
+ outputs:
+ An instance of OutputsCyclicStrainEnergy.
"""
return super().outputs
@@ -354,12 +349,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,12 +368,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -390,15 +387,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -411,14 +408,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -431,14 +429,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -451,15 +450,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -472,17 +471,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -495,14 +492,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -515,18 +513,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -539,14 +534,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -559,12 +555,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -602,35 +599,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_strain_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_strain_energy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
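As a reference while reviewing the docstring and type-hint changes above, here is a minimal usage sketch of the cyclic strain energy operator (mapdl::rst::ENG_SE_cyclic); the result file path is hypothetical, and only the required data_sources pin plus the optional read_cyclic pin are connected:

>>> from ansys.dpf import core as dpf
>>> # Hypothetical path to a cyclic-symmetry MAPDL result file.
>>> data_sources = dpf.DataSources(r"D:/results/cyclic_sector.rst")
>>> config = dpf.operators.result.cyclic_strain_energy.default_config()
>>> op = dpf.operators.result.cyclic_strain_energy(config=config)
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion (see pin 14 above)
>>> energies = op.outputs.fields_container()
>>> expanded_meshes = op.outputs.expanded_meshes()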
diff --git a/src/ansys/dpf/core/operators/result/cyclic_volume.py b/src/ansys/dpf/core/operators/result/cyclic_volume.py
index 694861a56f5..874e6633991 100644
--- a/src/ansys/dpf/core/operators/result/cyclic_volume.py
+++ b/src/ansys/dpf/core/operators/result/cyclic_volume.py
@@ -4,54 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class cyclic_volume(Operator):
- """This operator is deprecated: use the operator mapdl::rst::ENG_VOL with
- the read_cyclic pin instead. Read mapdl::rst::ENG_VOL from an rst
- file and expand it with cyclic symmetry.
+ r"""This operator is deprecated: use the operator mapdl::rst::ENG_VOL with
+ the read_cyclic pin instead. Read mapdl::rst::ENG_VOL from an rst file
+ and expand it with cyclic symmetry.
+
Parameters
----------
- time_scoping : Scoping, optional
- mesh_scoping : ScopingsContainer or Scoping, optional
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer or Stream, optional
+ time_scoping: Scoping, optional
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer or Stream, optional
Streams containing the result file.
- data_sources : DataSources
- Data sources containing the result file.
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- all_dofs : bool, optional
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
- sector_mesh : MeshedRegion or MeshesContainer, optional
- Mesh of the base sector (can be a skin).
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded.
- cyclic_support : CyclicSupport, optional
+ data_sources: DataSources
+ data sources containing the result file.
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ all_dofs: bool, optional
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
+ sector_mesh: MeshedRegion or MeshesContainer, optional
+ mesh of the base sector (can be a skin).
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded.
+ cyclic_support: CyclicSupport, optional
Returns
-------
- fields_container : FieldsContainer
- Fieldscontainer filled in
- expanded_meshes : MeshesContainer
+ fields_container: FieldsContainer
+ FieldsContainer filled in
+ expanded_meshes: MeshesContainer
Examples
--------
@@ -149,10 +145,11 @@ def __init__(
self.inputs.cyclic_support.connect(cyclic_support)
@staticmethod
- def _spec():
- description = """This operator is deprecated: use the operator mapdl::rst::ENG_VOL with
- the read_cyclic pin instead. Read mapdl::rst::ENG_VOL from
- an rst file and expand it with cyclic symmetry."""
+ def _spec() -> Specification:
+ description = r"""This operator is deprecated: use the operator mapdl::rst::ENG_VOL with
+the read_cyclic pin instead. Read mapdl::rst::ENG_VOL from an rst file
+and expand it with cyclic symmetry.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -160,76 +157,67 @@ def _spec():
name="time_scoping",
type_names=["scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping", "vector"],
optional=True,
- document="""""",
+ document=r"""""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""Streams containing the result file.""",
+ document=r"""Streams containing the result file.""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Data sources containing the result file.""",
+ document=r"""data sources containing the result file.""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.""",
+ document=r"""if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.""",
),
7: PinSpecification(
name="sector_mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh of the base sector (can be a skin).""",
+ document=r"""mesh of the base sector (can be a skin).""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded.""",
+ document=r"""mesh expanded.""",
),
16: PinSpecification(
name="cyclic_support",
type_names=["cyclic_support"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -237,20 +225,20 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""Fieldscontainer filled in""",
+ document=r"""FieldsContainer filled in""",
),
1: PinSpecification(
name="expanded_meshes",
type_names=["meshes_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -259,29 +247,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="mapdl::rst::ENG_VOL_cyclic", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsCyclicVolume:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsCyclicVolume
+ inputs:
+ An instance of InputsCyclicVolume.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsCyclicVolume:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsCyclicVolume
+ outputs:
+ An instance of OutputsCyclicVolume.
"""
return super().outputs
@@ -348,12 +343,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._cyclic_support)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -366,12 +362,13 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -384,15 +381,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -405,14 +402,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
Streams containing the result file.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -425,14 +423,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Data sources containing the result file.
+ data sources containing the result file.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +444,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,17 +465,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- If this pin is set to true, all the dofs are
- retrieved. by default this pin is set
- to false and only the translational
- dofs are retrieved.
+ if this pin is set to true, all the dofs are retrieved. By default this pin is set to false and only the translational dofs are retrieved.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -489,14 +486,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def sector_mesh(self):
- """Allows to connect sector_mesh input to the operator.
+ def sector_mesh(self) -> Input:
+ r"""Allows to connect sector_mesh input to the operator.
- Mesh of the base sector (can be a skin).
+ mesh of the base sector (can be a skin).
- Parameters
- ----------
- my_sector_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -509,18 +507,15 @@ def sector_mesh(self):
return self._sector_mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,14 +528,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded.
+ mesh expanded.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,12 +549,13 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def cyclic_support(self):
- """Allows to connect cyclic_support input to the operator.
+ def cyclic_support(self) -> Input:
+ r"""Allows to connect cyclic_support input to the operator.
- Parameters
- ----------
- my_cyclic_support : CyclicSupport
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -592,35 +589,39 @@ def __init__(self, op: Operator):
self._outputs.append(self._expanded_meshes)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
+
+ FieldsContainer filled in
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_volume()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
@property
- def expanded_meshes(self):
- """Allows to get expanded_meshes output of the operator
+ def expanded_meshes(self) -> Output:
+ r"""Allows to get expanded_meshes output of the operator
Returns
- ----------
- my_expanded_meshes : MeshesContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.cyclic_volume()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_expanded_meshes = op.outputs.expanded_meshes()
- """ # noqa: E501
+ """
return self._expanded_meshes
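The cyclic_volume operator above is flagged as deprecated in favour of mapdl::rst::ENG_VOL with the read_cyclic pin, but its documented interface can still be exercised as follows; this is a sketch only, and the result file path is hypothetical:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"D:/results/cyclic_sector.rst")  # hypothetical path
>>> op = dpf.operators.result.cyclic_volume()
>>> op.inputs.data_sources.connect(data_sources)
>>> volumes = op.outputs.fields_container()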
diff --git a/src/ansys/dpf/core/operators/result/density.py b/src/ansys/dpf/core/operators/result/density.py
index 1c04de76cfb..0027586b6dc 100644
--- a/src/ansys/dpf/core/operators/result/density.py
+++ b/src/ansys/dpf/core/operators/result/density.py
@@ -4,73 +4,43 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class density(Operator):
- """Read Density by calling the readers defined by the datasources.
+ r"""Read Density by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -147,10 +117,9 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = (
- """Read Density by calling the readers defined by the datasources."""
- )
+ def _spec() -> Specification:
+ description = r"""Read Density by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +134,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +184,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +200,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="RHO", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDensity
+ inputs:
+ An instance of InputsDensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDensity
+ outputs:
+ An instance of OutputsDensity.
"""
return super().outputs
@@ -339,28 +280,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,24 +301,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,15 +322,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,15 +343,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +364,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,18 +385,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,16 +406,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,16 +427,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -552,18 +466,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.density()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
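For context, a minimal sketch of the density operator ("RHO") whose docstrings are reflowed above; the result file path and the zone id passed to region_scoping are hypothetical, and only pins documented in the spec are used:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"D:/results/fluid_model.res")  # hypothetical path
>>> op = dpf.operators.result.density()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.region_scoping.connect(7)  # hypothetical zone id (pin 25 accepts an int)
>>> rho = op.outputs.fields_container()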
diff --git a/src/ansys/dpf/core/operators/result/displacement.py b/src/ansys/dpf/core/operators/result/displacement.py
index 93dff346995..4cd9527bf08 100644
--- a/src/ansys/dpf/core/operators/result/displacement.py
+++ b/src/ansys/dpf/core/operators/result/displacement.py
@@ -4,83 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class displacement(Operator):
- """Read/compute nodal displacements by calling the readers defined by the
+ r"""Read/compute nodal displacements by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -175,9 +142,10 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """Read/compute nodal displacements by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal displacements by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,105 +160,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
},
map_output_pin_spec={
@@ -298,14 +228,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -314,29 +244,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="U", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDisplacement:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDisplacement
+ inputs:
+ An instance of InputsDisplacement.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDisplacement:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDisplacement
+ outputs:
+ An instance of OutputsDisplacement.
"""
return super().outputs
@@ -403,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -437,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -467,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -488,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -509,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -530,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -551,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -572,18 +487,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +508,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,17 +529,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -640,15 +550,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -679,18 +589,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
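As context for the reworked input/output docstrings in this file, a minimal usage sketch of the displacement operator following the pattern those pins describe; the result file path below is a hypothetical placeholder, not part of this patch:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement()
>>> # data_sources is the one required pin for these result operators
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> # evaluating the operator through its outputs yields the FieldsContainer
>>> fields_container = op.outputs.fields_container()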
diff --git a/src/ansys/dpf/core/operators/result/displacement_X.py b/src/ansys/dpf/core/operators/result/displacement_X.py
index 43c58ea5629..669b5c6c009 100644
--- a/src/ansys/dpf/core/operators/result/displacement_X.py
+++ b/src/ansys/dpf/core/operators/result/displacement_X.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class displacement_X(Operator):
- """Read/compute nodal displacements X component of the vector (1st
+ r"""Read/compute nodal displacements X component of the vector (1st
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal displacements X component of the vector (1st
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal displacements X component of the vector (1st
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="UX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDisplacementX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDisplacementX
+ inputs:
+ An instance of InputsDisplacementX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDisplacementX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDisplacementX
+ outputs:
+ An instance of OutputsDisplacementX.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
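The time_scoping and mesh_scoping descriptions rewrapped above can be exercised as in this short sketch; the node IDs and file path are hypothetical, assuming a nodal scoping is wanted:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement_X()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
>>> # restrict the output field to three nodes; output ordering follows the field scoping
>>> op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal))
>>> # a single time/freq set id can be passed directly as an int
>>> op.inputs.time_scoping.connect(1)
>>> ux = op.outputs.fields_container()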
diff --git a/src/ansys/dpf/core/operators/result/displacement_Y.py b/src/ansys/dpf/core/operators/result/displacement_Y.py
index ff849bb2b0c..4493ea9a28d 100644
--- a/src/ansys/dpf/core/operators/result/displacement_Y.py
+++ b/src/ansys/dpf/core/operators/result/displacement_Y.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class displacement_Y(Operator):
- """Read/compute nodal displacements Y component of the vector (2nd
+ r"""Read/compute nodal displacements Y component of the vector (2nd
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal displacements Y component of the vector (2nd
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal displacements Y component of the vector (2nd
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="UY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDisplacementY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDisplacementY
+ inputs:
+ An instance of InputsDisplacementY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDisplacementY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDisplacementY
+ outputs:
+ An instance of OutputsDisplacementY.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
diff --git a/src/ansys/dpf/core/operators/result/displacement_Z.py b/src/ansys/dpf/core/operators/result/displacement_Z.py
index 540f68079b7..a3d92135138 100644
--- a/src/ansys/dpf/core/operators/result/displacement_Z.py
+++ b/src/ansys/dpf/core/operators/result/displacement_Z.py
@@ -4,72 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class displacement_Z(Operator):
- """Read/compute nodal displacements Z component of the vector (3rd
+ r"""Read/compute nodal displacements Z component of the vector (3rd
component) by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -146,10 +118,10 @@ def __init__(
self.inputs.read_cyclic.connect(read_cyclic)
@staticmethod
- def _spec():
- description = """Read/compute nodal displacements Z component of the vector (3rd
- component) by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute nodal displacements Z component of the vector (3rd
+component) by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -164,82 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
},
map_output_pin_spec={
@@ -247,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -263,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="UZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDisplacementZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDisplacementZ
+ inputs:
+ An instance of InputsDisplacementZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDisplacementZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDisplacementZ
+ outputs:
+ An instance of OutputsDisplacementZ.
"""
return super().outputs
@@ -338,28 +284,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_cyclic)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -372,24 +305,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -402,15 +326,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -423,15 +347,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -444,15 +368,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +389,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +410,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,18 +431,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -549,18 +470,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.displacement_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
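The retyped default_config signature in these operators returns a Config object that can be adjusted and handed back when the operator is instantiated; a brief sketch of that round trip, assuming the default config is an acceptable starting point and the config keyword of the generated constructor is used:

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.displacement_Z.default_config()
>>> # the (possibly modified) config is passed back when instantiating the operator
>>> op = dpf.operators.result.displacement_Z(config=config)
>>> # read_cyclic accepts an int as documented: 1 reads a single cyclic sector
>>> op.inputs.read_cyclic.connect(1)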
diff --git a/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py b/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py
index 12aaee5d7ab..c1107b7cfd5 100644
--- a/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py
+++ b/src/ansys/dpf/core/operators/result/div_lighthill_tensor.py
@@ -4,74 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class div_lighthill_tensor(Operator):
- """Read Divergence of the Lighthill Tensor by calling the readers defined
+ r"""Read Divergence of the Lighthill Tensor by calling the readers defined
by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -148,9 +118,10 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = """Read Divergence of the Lighthill Tensor by calling the readers defined
- by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Divergence of the Lighthill Tensor by calling the readers defined
+by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="SV_RTDFT", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDivLighthillTensor:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDivLighthillTensor
+ inputs:
+ An instance of InputsDivLighthillTensor.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDivLighthillTensor:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDivLighthillTensor
+ outputs:
+ An instance of OutputsDivLighthillTensor.
"""
return super().outputs
@@ -347,28 +290,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -381,24 +311,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -411,15 +332,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -432,15 +353,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -453,15 +374,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -474,18 +395,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -498,16 +416,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,16 +437,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,18 +478,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.div_lighthill_tensor()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
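A minimal sketch of driving div_lighthill_tensor through the pins documented above (not part of the patch; the fluid result file path, zone id, and time set ids are hypothetical):

from ansys.dpf import core as dpf

# Hypothetical fluid result file exposed through DataSources.
ds = dpf.DataSources(r"path/to/fluid_results.cas.h5")

op = dpf.operators.result.div_lighthill_tensor(data_sources=ds)
op.inputs.region_scoping.connect(1)     # pin 25: a single region/zone id
op.inputs.time_scoping.connect([1, 2])  # pin 0: time/freq set ids

fields = op.outputs.fields_container()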
diff --git a/src/ansys/dpf/core/operators/result/dynamic_viscosity.py b/src/ansys/dpf/core/operators/result/dynamic_viscosity.py
index 464d613ba2b..2319ff3b83a 100644
--- a/src/ansys/dpf/core/operators/result/dynamic_viscosity.py
+++ b/src/ansys/dpf/core/operators/result/dynamic_viscosity.py
@@ -4,74 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class dynamic_viscosity(Operator):
- """Read Dynamic Viscosity by calling the readers defined by the
+ r"""Read Dynamic Viscosity by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -148,9 +118,10 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = """Read Dynamic Viscosity by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Dynamic Viscosity by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="MU", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsDynamicViscosity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsDynamicViscosity
+ inputs:
+ An instance of InputsDynamicViscosity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsDynamicViscosity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsDynamicViscosity
+ outputs:
+ An instance of OutputsDynamicViscosity.
"""
return super().outputs
@@ -347,28 +290,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -381,24 +311,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -411,15 +332,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -432,15 +353,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -453,15 +374,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -474,18 +395,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -498,16 +416,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,16 +437,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -560,18 +476,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.dynamic_viscosity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
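The default_config change above can be exercised as follows; a minimal sketch (not part of the patch), assuming a hypothetical fluid result file:

from ansys.dpf import core as dpf

# A new Config equivalent to the operator defaults; print it to inspect the available options.
config = dpf.operators.result.dynamic_viscosity.default_config()
print(config)

op = dpf.operators.result.dynamic_viscosity(config=config)
op.inputs.data_sources.connect(dpf.DataSources(r"path/to/fluid_results.cas.h5"))
mu = op.outputs.fields_container()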
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain.py b/src/ansys/dpf/core/operators/result/elastic_strain.py
index 3c1e68e86f5..019325e5647 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain.py
@@ -4,108 +4,60 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain(Operator):
- """Read/compute element nodal component elastic strains by calling the
- readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element nodal component elastic strains by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -224,11 +176,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains by calling the
- readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -243,144 +196,91 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -388,14 +288,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -404,29 +304,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrain:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrain
+ inputs:
+ An instance of InputsElasticStrain.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrain:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrain
+ outputs:
+ An instance of OutputsElasticStrain.
"""
return super().outputs
@@ -511,28 +418,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -545,24 +439,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -575,15 +460,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +481,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,15 +502,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -638,15 +523,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -659,15 +544,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -680,15 +565,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -701,18 +586,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -725,15 +607,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -746,17 +628,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -769,15 +649,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -790,15 +670,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -811,20 +691,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -837,21 +712,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -882,18 +751,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
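Note: the pattern documented in the hunks above is the same for every generated result operator: connect the required pins (at minimum data_sources) through op.inputs, optionally set pins such as requested_location or the cyclic-expansion inputs, then evaluate by requesting a pin from op.outputs. A minimal sketch of that workflow for the operator above, assuming a local result file (the path is purely illustrative):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain()
>>> data_sources = dpf.DataSources(r"path/to/model.rst")  # hypothetical result file
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")  # optional; Nodal, Elemental or ElementalNodal
>>> fields = op.outputs.fields_container()  # evaluates the operator and returns a FieldsContainer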
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_X.py b/src/ansys/dpf/core/operators/result/elastic_strain_X.py
index 39e85bd2f93..6c7d9259716 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_X.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_X.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_X(Operator):
- """Read/compute element nodal component elastic strains XX normal
- component (00 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains XX normal component
+ (00 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains XX normal
- component (00 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains XX normal component
+(00 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainX
+ inputs:
+ An instance of InputsElasticStrainX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainX
+ outputs:
+ An instance of OutputsElasticStrainX.
"""
return super().outputs
@@ -385,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -419,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -470,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -491,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -574,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -598,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
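Note: the component operators expose the same pins, so the requested_location and read_cyclic inputs documented above are connected the same way. A short sketch for elastic_strain_X, again with an assumed result file path and illustrative pin values:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_X()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/model.rst"))  # assumed path
>>> op.inputs.requested_location.connect("Elemental")  # default is Nodal
>>> op.inputs.read_cyclic.connect(2)  # 2 requests cyclic expansion (see the pin documentation above)
>>> fields = op.outputs.fields_container()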
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_XY.py b/src/ansys/dpf/core/operators/result/elastic_strain_XY.py
index 8d670640e84..4800d3927a4 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_XY.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_XY.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_XY(Operator):
- """Read/compute element nodal component elastic strains XY shear
- component (01 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains XY shear component
+ (01 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains XY shear
- component (01 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains XY shear component
+(01 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELXY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainXy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainXy
+ inputs:
+ An instance of InputsElasticStrainXy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainXy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainXy
+ outputs:
+ An instance of OutputsElasticStrainXy.
"""
return super().outputs
@@ -387,28 +330,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -421,24 +351,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -451,15 +372,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -472,15 +393,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -493,15 +414,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -514,15 +435,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -535,15 +456,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -556,14 +477,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -576,18 +498,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,15 +519,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -639,18 +558,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_XY()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
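Note: the new default_config signature shown in these hunks returns a Config object that can be adjusted and reused when building the operator. A sketch of that round trip, assuming (as for the other generated operator classes) that the constructor accepts a config keyword argument:

>>> from ansys.dpf import core as dpf
>>> cfg = dpf.operators.result.elastic_strain_XY.default_config()  # uses the global server when server is None
>>> # tune cfg as needed, then pass it back when instantiating the operator
>>> op = dpf.operators.result.elastic_strain_XY(config=cfg)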
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py b/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py
index 40cbc58507b..8c4061469f5 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_XZ.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_XZ(Operator):
- """Read/compute element nodal component elastic strains XZ shear
- component (02 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains XZ shear component
+ (02 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains XZ shear
- component (02 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains XZ shear component
+(02 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELXZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainXz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainXz
+ inputs:
+ An instance of InputsElasticStrainXz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainXz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainXz
+ outputs:
+ An instance of OutputsElasticStrainXz.
"""
return super().outputs
@@ -387,28 +330,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -421,24 +351,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -451,15 +372,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -472,15 +393,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -493,15 +414,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -514,15 +435,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -535,15 +456,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -556,14 +477,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -576,18 +498,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,15 +519,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -639,18 +558,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_XZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
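The typed `Input`/`Output` accessors in this hunk keep the usual pin workflow: connect the inputs, then evaluate the operator by requesting an output. A minimal usage sketch for `elastic_strain_XZ`, assuming the `dpf.DataSources` helper and a placeholder result file path:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_XZ()
>>> # Pin 4 (data_sources) is the only mandatory input; the path below is a placeholder.
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/file.rst"))
>>> # Requesting the output evaluates the operator and returns the FieldsContainer.
>>> fields_container = op.outputs.fields_container()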
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_Y.py b/src/ansys/dpf/core/operators/result/elastic_strain_Y.py
index e6aed30614b..72c7d4f0a04 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_Y.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_Y.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_Y(Operator):
- """Read/compute element nodal component elastic strains YY normal
- component (11 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains YY normal component
+ (11 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains YY normal
- component (11 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains YY normal component
+(11 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainY
+ inputs:
+ An instance of InputsElasticStrainY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainY
+ outputs:
+ An instance of OutputsElasticStrainY.
"""
return super().outputs
@@ -385,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -419,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -470,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -491,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -574,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -598,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
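The optional pins documented above (time_scoping, requested_location, and the others) are connected exactly like the mandatory data_sources pin. A short sketch for `elastic_strain_Y`, assuming a placeholder result file that contains at least two time/freq sets:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_Y()
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/file.rst"))
>>> # Optional pins: request the second time/freq set and an Elemental location (default is Nodal).
>>> op.inputs.time_scoping.connect(2)
>>> op.inputs.requested_location.connect("Elemental")
>>> fields_container = op.outputs.fields_container()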
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py b/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py
index d9a09882875..3cb8309e071 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_YZ.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_YZ(Operator):
- """Read/compute element nodal component elastic strains YZ shear
- component (12 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains YZ shear component
+ (12 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains YZ shear
- component (12 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains YZ shear component
+(12 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELYZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainYz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainYz
+ inputs:
+ An instance of InputsElasticStrainYz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainYz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainYz
+ outputs:
+ An instance of OutputsElasticStrainYz.
"""
return super().outputs
@@ -387,28 +330,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -421,24 +351,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -451,15 +372,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -472,15 +393,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -493,15 +414,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -514,15 +435,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -535,15 +456,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -556,14 +477,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -576,18 +498,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,15 +519,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -639,18 +558,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_YZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
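The typed `default_config` shown above returns a Config that can be tuned before creating the operator. A sketch for the EPELYZ operator, assuming the generated constructor accepts a `config` keyword as the default_config docstring suggests:

>>> from ansys.dpf import core as dpf
>>> # Retrieve the default configuration of the EPELYZ operator and reuse it at instantiation.
>>> config = dpf.operators.result.elastic_strain_YZ.default_config()
>>> op = dpf.operators.result.elastic_strain_YZ(config=config)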
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_Z.py b/src/ansys/dpf/core/operators/result/elastic_strain_Z.py
index c7b3f735a5a..eca24c466cc 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_Z.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_Z.py
@@ -4,80 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_Z(Operator):
- """Read/compute element nodal component elastic strains ZZ normal
- component (22 component) by calling the readers defined by the
- datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal component elastic strains ZZ normal component
+ (22 component) by calling the readers defined by the datasources.
+ Regarding the requested location and the input mesh scoping, the result
+ location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -166,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains ZZ normal
- component (22 component) by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains ZZ normal component
+(22 component) by calling the readers defined by the datasources.
+Regarding the requested location and the input mesh scoping, the result
+location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -186,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -282,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -298,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPELZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainZ
+ inputs:
+ An instance of InputsElasticStrainZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainZ
+ outputs:
+ An instance of OutputsElasticStrainZ.
"""
return super().outputs
@@ -385,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -419,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -470,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -491,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -574,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -598,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
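A minimal usage sketch for the operator above; the result file path "model.rst" is purely illustrative, while the operator and pin names come from the generated class:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_Z()
>>> # Connect a result file through a DataSources instance (path is hypothetical)
>>> data_sources = dpf.DataSources("model.rst")
>>> op.inputs.data_sources.connect(data_sources)
>>> # Request nodal averaging of the elemental nodal result
>>> op.inputs.requested_location.connect("Nodal")
>>> # Evaluate the operator and retrieve the output pin
>>> fields_container = op.outputs.fields_container()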
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py b/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py
index 5b6e22bc7c9..c78d2040862 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_energy_density.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_energy_density(Operator):
- """Read/compute element nodal elastic strain energy density by calling
- the readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element nodal elastic strain energy density by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal elastic strain energy density by calling
- the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal elastic strain energy density by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENL_ELENG", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainEnergyDensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainEnergyDensity
+ inputs:
+ An instance of InputsElasticStrainEnergyDensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainEnergyDensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainEnergyDensity
+ outputs:
+ An instance of OutputsElasticStrainEnergyDensity.
"""
return super().outputs
@@ -437,28 +362,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,24 +383,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,15 +404,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -522,15 +425,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,15 +446,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,15 +467,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,15 +488,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,15 +509,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -627,15 +530,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -648,20 +551,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -674,21 +572,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -721,18 +613,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_energy_density()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
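As above, a short sketch of driving this operator, here exercising the split_shells pin; the file path is again hypothetical:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_energy_density()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
>>> # Keep shell and solid elemental nodal results in separate fields
>>> op.inputs.split_shells.connect(True)
>>> fields_container = op.outputs.fields_container()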
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py b/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py
index 698e124fbf8..b5354a3c4e0 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_eqv.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_eqv(Operator):
- """Read/compute element nodal equivalent elastic strain by calling the
- readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element nodal equivalent elastic strain by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal equivalent elastic strain by calling the
- readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal equivalent elastic strain by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated, modified in place""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL_EQV", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainEqv:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainEqv
+ inputs:
+ An instance of InputsElasticStrainEqv.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainEqv:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainEqv
+ outputs:
+ An instance of OutputsElasticStrainEqv.
"""
return super().outputs
@@ -423,28 +348,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -457,24 +369,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -487,15 +390,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -508,15 +411,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -529,15 +432,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -550,15 +453,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -571,15 +474,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -592,15 +495,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -613,15 +516,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -634,20 +537,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -660,21 +558,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -705,18 +597,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_eqv()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
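And a similar sketch for the equivalent strain operator, selecting a single time/freq set by its integer id (the id value 1 and the file path are illustrative):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_eqv()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
>>> # A single int is interpreted as a time/freq set id (see the time_scoping pin doc)
>>> op.inputs.time_scoping.connect(1)
>>> fields_container = op.outputs.fields_container()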
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py b/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py
index 44490dafafa..36ddd01ea34 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_intensity.py
@@ -4,76 +4,47 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_intensity(Operator):
- """Reads/computes element nodal component elastic strains, average it on
+ r"""Reads/computes element nodal component elastic strains, average it on
nodes (by default) and computes its invariants.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -162,9 +133,10 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Reads/computes element nodal component elastic strains, average it on
- nodes (by default) and computes its invariants."""
+ def _spec() -> Specification:
+ description = r"""Reads/computes element nodal component elastic strains, average it on
+nodes (by default) and computes its invariants.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -179,95 +151,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -275,14 +213,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -291,29 +229,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL_intensity", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainIntensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainIntensity
+ inputs:
+ An instance of InputsElasticStrainIntensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainIntensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainIntensity
+ outputs:
+ An instance of OutputsElasticStrainIntensity.
"""
return super().outputs
@@ -390,28 +335,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,24 +356,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -454,15 +377,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -475,15 +398,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -496,15 +419,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -517,15 +440,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,15 +461,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -559,12 +482,13 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -577,18 +501,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -601,15 +522,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -642,18 +563,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_intensity()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
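
A minimal usage sketch of the elastic_strain_intensity operator based on the pins documented above; it assumes a running local DPF server, and the result-file path and the time set id are placeholders:

>>> from ansys.dpf import core as dpf
>>> # Placeholder result file; any file supported by the DPF readers works here.
>>> data_sources = dpf.DataSources("path/to/file.rst")
>>> op = dpf.operators.result.elastic_strain_intensity()
>>> op.inputs.data_sources.connect(data_sources)
>>> # time_scoping accepts ints for time/freq set ids, per the docstring above.
>>> op.inputs.time_scoping.connect(1)
>>> fields_container = op.outputs.fields_container()
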
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py b/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py
index 1d80c312042..8a1fd8d22d2 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_max_shear.py
@@ -4,76 +4,47 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_max_shear(Operator):
- """Reads/computes element nodal component elastic strains, average it on
+ r"""Reads/computes element nodal component elastic strains, average it on
nodes (by default) and computes its invariants.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -162,9 +133,10 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Reads/computes element nodal component elastic strains, average it on
- nodes (by default) and computes its invariants."""
+ def _spec() -> Specification:
+ description = r"""Reads/computes element nodal component elastic strains, average it on
+nodes (by default) and computes its invariants.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -179,95 +151,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -275,14 +213,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -291,29 +229,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL_max_shear", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainMaxShear:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainMaxShear
+ inputs:
+ An instance of InputsElasticStrainMaxShear.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainMaxShear:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainMaxShear
+ outputs:
+ An instance of OutputsElasticStrainMaxShear.
"""
return super().outputs
@@ -390,28 +335,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,24 +356,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -454,15 +377,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -475,15 +398,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -496,15 +419,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -517,15 +440,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -538,15 +461,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -559,12 +482,13 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -577,18 +501,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -601,15 +522,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -642,18 +563,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_max_shear()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
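
A similar sketch for elastic_strain_max_shear that scopes the output to a few nodes through the mesh_scoping pin; the node ids and the file path are placeholders, and the Scoping(ids=..., location=...) keyword form is assumed to be available:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_max_shear()
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/file.rst"))
>>> # Restrict the output fields to a few nodes; their order may differ from the input scoping.
>>> node_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
>>> op.inputs.mesh_scoping.connect(node_scoping)
>>> fields_container = op.outputs.fields_container()
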
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py
index adb77fe3b32..cae0015f3ac 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_1.py
@@ -4,78 +4,49 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_principal_1(Operator):
- """Read/compute element nodal component elastic strains 1st principal
+ r"""Read/compute element nodal component elastic strains 1st principal
component by calling the readers defined by the datasources and
- computing its eigen values. The off-diagonal strains are first
- converted from Voigt notation to the standard strain values.
+ computing its eigenvalues. The off-diagonal strains are first converted
+ from Voigt notation to the standard strain values.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -164,12 +135,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains 1st principal
- component by calling the readers defined by the
- datasources and computing its eigen values. The off-
- diagonal strains are first converted from Voigt notation
- to the standard strain values."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains 1st principal
+component by calling the readers defined by the datasources and
+computing its eigenvalues. The off-diagonal strains are first converted
+from Voigt notation to the standard strain values.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -184,95 +155,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -280,14 +217,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -296,29 +233,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL1", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainPrincipal1:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainPrincipal1
+ inputs:
+ An instance of InputsElasticStrainPrincipal1.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainPrincipal1:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainPrincipal1
+ outputs:
+ An instance of OutputsElasticStrainPrincipal1.
"""
return super().outputs
@@ -395,28 +339,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -429,24 +360,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -459,15 +381,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -480,15 +402,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,15 +423,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -522,15 +444,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,15 +465,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,12 +486,13 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -582,18 +505,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,15 +526,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -647,18 +567,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_principal_1()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
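
The generated __init__ also accepts every optional pin as a keyword argument, so the connections above can be written in one call; a sketch with a placeholder path, assuming dpf.locations.elemental is the wanted requested_location:

>>> from ansys.dpf import core as dpf
>>> # Pins can be passed to the constructor instead of using op.inputs.*.connect(...).
>>> op = dpf.operators.result.elastic_strain_principal_1(
...     data_sources=dpf.DataSources("path/to/file.rst"),
...     requested_location=dpf.locations.elemental,
... )
>>> fields_container = op.outputs.fields_container()
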
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py
index 7d5852898ad..a2fa240be9a 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_2.py
@@ -4,78 +4,49 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_principal_2(Operator):
- """Read/compute element nodal component elastic strains 2nd principal
+ r"""Read/compute element nodal component elastic strains 2nd principal
component by calling the readers defined by the datasources and
- computing its eigen values. The off-diagonal strains are first
- converted from Voigt notation to the standard strain values.
+ computing its eigenvalues. The off-diagonal strains are first converted
+ from Voigt notation to the standard strain values.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -164,12 +135,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains 2nd principal
- component by calling the readers defined by the
- datasources and computing its eigen values. The off-
- diagonal strains are first converted from Voigt notation
- to the standard strain values."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains 2nd principal
+component by calling the readers defined by the datasources and
+computing its eigenvalues. The off-diagonal strains are first converted
+from Voigt notation to the standard strain values.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -184,95 +155,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -280,14 +217,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -296,29 +233,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL2", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainPrincipal2:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainPrincipal2
+ inputs:
+ An instance of InputsElasticStrainPrincipal2.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainPrincipal2:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainPrincipal2
+ outputs:
+ An instance of OutputsElasticStrainPrincipal2.
"""
return super().outputs
@@ -395,28 +339,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -429,24 +360,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -459,15 +381,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -480,15 +402,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,15 +423,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -522,15 +444,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,15 +465,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,12 +486,13 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -582,18 +505,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,15 +526,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -647,18 +567,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_principal_2()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
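A minimal usage sketch for the regenerated operator above, assuming a local result file (the path is a placeholder; every call shown follows the doctest pattern used in the generated docstrings):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_principal_2()
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder result file
>>> op.inputs.data_sources.connect(data_sources)
>>> fields_container = op.outputs.fields_container()  # evaluates the operator and fetches pin 0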
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py b/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py
index 23982918384..9718119f958 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_principal_3.py
@@ -4,78 +4,49 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_principal_3(Operator):
- """Read/compute element nodal component elastic strains 3rd principal
+ r"""Read/compute element nodal component elastic strains 3rd principal
component by calling the readers defined by the datasources and
- computing its eigen values. The off-diagonal strains are first
- converted from Voigt notation to the standard strain values.
+    computing its eigenvalues. The off-diagonal strains are first converted
+ from Voigt notation to the standard strain values.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -164,12 +135,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element nodal component elastic strains 3rd principal
- component by calling the readers defined by the
- datasources and computing its eigen values. The off-
- diagonal strains are first converted from Voigt notation
- to the standard strain values."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal component elastic strains 3rd principal
+component by calling the readers defined by the datasources and
+computing its eigenvalues. The off-diagonal strains are first converted
+from Voigt notation to the standard strain values.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -184,95 +155,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""""",
+ document=r"""""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -280,14 +217,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -296,29 +233,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPEL3", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainPrincipal3:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainPrincipal3
+ inputs:
+ An instance of InputsElasticStrainPrincipal3.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainPrincipal3:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainPrincipal3
+ outputs:
+ An instance of OutputsElasticStrainPrincipal3.
"""
return super().outputs
@@ -395,28 +339,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -429,24 +360,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -459,15 +381,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -480,15 +402,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -501,15 +423,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -522,15 +444,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -543,15 +465,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -564,12 +486,13 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -582,18 +505,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -606,15 +526,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -647,18 +567,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_principal_3()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
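The 3rd-principal operator exposes the same pins; a hedged sketch of connecting the time_scoping and read_cyclic inputs documented above (the set ids, the read_cyclic value, and the file path are illustrative placeholders):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_principal_3()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder result file
>>> op.inputs.time_scoping.connect([1, 2])  # time/freq set ids passed as ints
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion
>>> fields_container = op.outputs.fields_container()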
diff --git a/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py
index 6d6c4288827..338f572b7b1 100644
--- a/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py
+++ b/src/ansys/dpf/core/operators/result/elastic_strain_rotation_by_euler_nodes.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elastic_strain_rotation_by_euler_nodes(Operator):
- """read Euler angles on elements from the result file and rotate the
- fields in the fieldsContainer.
+ r"""read Euler angles on elements from the result file and rotate the fields
+ in the fieldsContainer.
+
Parameters
----------
- fields_container : FieldsContainer, optional
- streams_container : StreamsContainer or Stream or Class
+ fields_container: FieldsContainer, optional
+ streams_container: StreamsContainer or Stream or Class
Dataprocessing::Crstfilewrapper, optional
- data_sources : DataSources
+ data_sources: DataSources
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -75,9 +80,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """read Euler angles on elements from the result file and rotate the
- fields in the fieldsContainer."""
+ def _spec() -> Specification:
+ description = r"""read Euler angles on elements from the result file and rotate the fields
+in the fieldsContainer.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -85,7 +91,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="streams_container",
@@ -95,13 +101,13 @@ def _spec():
"class dataProcessing::CRstFileWrapper",
],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -109,14 +115,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -125,31 +131,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="mapdl::rst::EPEL_rotation_by_euler_nodes", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsElasticStrainRotationByEulerNodes:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElasticStrainRotationByEulerNodes
+ inputs:
+ An instance of InputsElasticStrainRotationByEulerNodes.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElasticStrainRotationByEulerNodes:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElasticStrainRotationByEulerNodes
+ outputs:
+ An instance of OutputsElasticStrainRotationByEulerNodes.
"""
return super().outputs
@@ -186,12 +199,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -204,13 +218,13 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream or Class
- Dataprocessing::Crstfilewrapper
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -223,12 +237,13 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -261,18 +276,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
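The rotation operator above is typically chained after a strain evaluation; a sketch assuming the companion elastic_strain operator is available under dpf.operators.result and using a placeholder result file:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/file.rst")  # placeholder result file
>>> strain_op = dpf.operators.result.elastic_strain()
>>> strain_op.inputs.data_sources.connect(data_sources)
>>> rot_op = dpf.operators.result.elastic_strain_rotation_by_euler_nodes()
>>> rot_op.inputs.fields_container.connect(strain_op.outputs.fields_container)
>>> rot_op.inputs.data_sources.connect(data_sources)
>>> rotated_fields = rot_op.outputs.fields_container()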
diff --git a/src/ansys/dpf/core/operators/result/electric_field.py b/src/ansys/dpf/core/operators/result/electric_field.py
index b48f3129682..8ffd71aca7b 100644
--- a/src/ansys/dpf/core/operators/result/electric_field.py
+++ b/src/ansys/dpf/core/operators/result/electric_field.py
@@ -4,91 +4,51 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_field(Operator):
- """Read/compute electric field by calling the readers defined by the
+ r"""Read/compute electric field by calling the readers defined by the
datasources. Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +143,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute electric field by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute electric field by calling the readers defined by the
+datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +162,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+                    document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+                    document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +230,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +246,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EF", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricField:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricField
+ inputs:
+ An instance of InputsElectricField.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricField:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricField
+ outputs:
+ An instance of OutputsElectricField.
"""
return super().outputs
@@ -417,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -451,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -586,15 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -607,15 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -628,20 +529,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ If this pin is set to true, it forces elemental nodal shell and solid results to be split. If set to false (default), a specific shell layer is still needed to merge the fields. Merging is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -654,21 +550,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if the split_shells pin is set to true, we choose one of the shell layers for shell elements. If the split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -699,18 +589,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
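
The hunks above standardize the electric_field docstrings around typed Input/Output accessors. As a minimal usage sketch of the documented pin workflow (the result file path below is a placeholder for illustration, not part of this diff):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path
>>> op.inputs.requested_location.connect("Nodal")  # Nodal, Elemental or ElementalNodal
>>> fields_container = op.outputs.fields_container()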
diff --git a/src/ansys/dpf/core/operators/result/electric_field_X.py b/src/ansys/dpf/core/operators/result/electric_field_X.py
index 48baca6fa5b..f0a28efd215 100644
--- a/src/ansys/dpf/core/operators/result/electric_field_X.py
+++ b/src/ansys/dpf/core/operators/result/electric_field_X.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_field_X(Operator):
- """Read/compute electric field X component of the vector (1st component)
- by calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute electric field X component of the vector (1st component) by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute electric field X component of the vector (1st component)
- by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute electric field X component of the vector (1st component) by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFieldX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFieldX
+ inputs:
+ An instance of InputsElectricFieldX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFieldX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFieldX
+ outputs:
+ An instance of OutputsElectricFieldX.
"""
return super().outputs
@@ -384,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -418,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -511,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -532,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -573,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -597,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -636,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
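
electric_field_X exposes the same pins as electric_field; a hedged sketch of restricting the request to specific nodes and time sets, per the rewritten pin docstrings (the IDs and path below are illustrative only):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field_X()
>>> op.inputs.data_sources.connect(dpf.DataSources(r"path/to/file.rst"))  # placeholder path
>>> op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal))  # example node IDs
>>> op.inputs.time_scoping.connect([1])  # time/freq set id
>>> fields_container = op.outputs.fields_container()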
diff --git a/src/ansys/dpf/core/operators/result/electric_field_Y.py b/src/ansys/dpf/core/operators/result/electric_field_Y.py
index b501ff24fa4..40e3f753c9a 100644
--- a/src/ansys/dpf/core/operators/result/electric_field_Y.py
+++ b/src/ansys/dpf/core/operators/result/electric_field_Y.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_field_Y(Operator):
- """Read/compute electric field Y component of the vector (2nd component)
- by calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute electric field Y component of the vector (2nd component) by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute electric field Y component of the vector (2nd component)
- by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute electric field Y component of the vector (2nd component) by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFieldY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFieldY
+ inputs:
+ An instance of InputsElectricFieldY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFieldY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFieldY
+ outputs:
+ An instance of OutputsElectricFieldY.
"""
return super().outputs
@@ -384,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -418,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -511,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -532,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -573,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -597,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -636,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
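
The rewritten Parameters section above also lists the constructor keywords connected in __init__. Assuming those keywords (a sketch only; the path is a placeholder), the operator can be configured in one call:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"path/to/file.rst")  # placeholder path
>>> op = dpf.operators.result.electric_field_Y(
...     data_sources=ds,
...     requested_location="Nodal",
...     bool_rotate_to_global=True,
... )
>>> fields_container = op.outputs.fields_container()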
diff --git a/src/ansys/dpf/core/operators/result/electric_field_Z.py b/src/ansys/dpf/core/operators/result/electric_field_Z.py
index 0dbc2c6e9f2..f694cede4ec 100644
--- a/src/ansys/dpf/core/operators/result/electric_field_Z.py
+++ b/src/ansys/dpf/core/operators/result/electric_field_Z.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_field_Z(Operator):
- """Read/compute electric field Z component of the vector (3rd component)
- by calling the readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute electric field Z component of the vector (3rd component) by
+ calling the readers defined by the datasources. Regarding the requested
+ location and the input mesh scoping, the result location can be
+ Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute electric field Z component of the vector (3rd component)
- by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh
- scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute electric field Z component of the vector (3rd component) by
+calling the readers defined by the datasources. Regarding the requested
+location and the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFieldZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFieldZ
+ inputs:
+ An instance of InputsElectricFieldZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFieldZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFieldZ
+ outputs:
+ An instance of OutputsElectricFieldZ.
"""
return super().outputs
@@ -384,28 +328,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -418,24 +349,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +370,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,15 +391,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,15 +412,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -511,15 +433,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -532,15 +454,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -553,14 +475,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -573,18 +496,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -597,15 +517,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -636,18 +556,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_field_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
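For orientation, a minimal usage sketch of the regenerated electric_field_Z ("EFZ") operator follows. It only uses the operator, pin, and output names visible in the generated class above; the result file path is a hypothetical placeholder:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/model.rth")  # hypothetical result file
>>> op = dpf.operators.result.electric_field_Z(data_sources=data_sources)
>>> op.inputs.requested_location.connect("Nodal")  # optional, Nodal is the default
>>> fields_container = op.outputs.fields_container()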
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density.py b/src/ansys/dpf/core/operators/result/electric_flux_density.py
index 09cb74cfa86..abc31f5544b 100644
--- a/src/ansys/dpf/core/operators/result/electric_flux_density.py
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density.py
@@ -4,91 +4,51 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_flux_density(Operator):
- """Read/compute Electric flux density by calling the readers defined by
- the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute Electric flux density by calling the readers defined by the
+ datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +143,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute Electric flux density by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Electric flux density by calling the readers defined by the
+datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +162,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +230,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +246,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFD", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFluxDensity:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFluxDensity
+ inputs:
+ An instance of InputsElectricFluxDensity.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFluxDensity:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFluxDensity
+ outputs:
+ An instance of OutputsElectricFluxDensity.
"""
return super().outputs
@@ -435,28 +358,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,24 +379,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,15 +400,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,15 +421,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,15 +442,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +463,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +484,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +505,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +526,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,20 +547,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -672,21 +568,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,18 +609,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
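As above, a minimal usage sketch for the electric_flux_density ("EFD") operator, here exercising the requested_location and split_shells pins documented in the spec; the result file path is a hypothetical placeholder:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"path/to/model.rth")  # hypothetical result file
>>> op = dpf.operators.result.electric_flux_density(data_sources=ds)
>>> op.inputs.requested_location.connect("ElementalNodal")
>>> op.inputs.split_shells.connect(True)  # split shell and solid elemental nodal results
>>> fields_container = op.outputs.fields_container()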
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_X.py b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py
index ecd132ad55a..541fef0acc0 100644
--- a/src/ansys/dpf/core/operators/result/electric_flux_density_X.py
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_X.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_flux_density_X(Operator):
- """Read/compute Electric flux density X component of the vector (1st
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute Electric flux density X component of the vector (1st
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute Electric flux density X component of the vector (1st
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Electric flux density X component of the vector (1st
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFDX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFluxDensityX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFluxDensityX
+ inputs:
+ An instance of InputsElectricFluxDensityX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFluxDensityX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFluxDensityX
+ outputs:
+ An instance of OutputsElectricFluxDensityX.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
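Similarly, a minimal sketch for electric_flux_density_X ("EFDX") showing the time_scoping and read_cyclic pins; the result file path and set IDs are hypothetical placeholders:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources(r"path/to/cyclic_model.rth")  # hypothetical result file
>>> op = dpf.operators.result.electric_flux_density_X(data_sources=ds)
>>> op.inputs.time_scoping.connect([1, 2])  # time/freq set IDs; doubles or a Field are also accepted
>>> op.inputs.read_cyclic.connect(2)  # 2: perform cyclic expansion (default is 1: read a single sector)
>>> fields_container = op.outputs.fields_container()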
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py
index 4d0c69a5b0d..ce44ca9c65d 100644
--- a/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Y.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_flux_density_Y(Operator):
- """Read/compute Electric flux density Y component of the vector (2nd
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute Electric flux density Y component of the vector (2nd
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute Electric flux density Y component of the vector (2nd
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Electric flux density Y component of the vector (2nd
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFDY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFluxDensityY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFluxDensityY
+ inputs:
+ An instance of InputsElectricFluxDensityY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFluxDensityY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFluxDensityY
+ outputs:
+ An instance of OutputsElectricFluxDensityY.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh from the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
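The reflowed time_scoping documentation for this operator describes a field-based form for selecting results at specific load steps. A minimal usage sketch, assuming a hypothetical result file "model.rst" and illustrative step IDs and frequency values (the Field construction is an assumption drawn from the pin description):
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density_Y()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # assumed path
>>> # a Field (not a list) carrying the time/freq values, scoped on "TimeFreq_steps"
>>> freqs = dpf.fields_factory.create_scalar_field(num_entities=2)
>>> freqs.data = [0.01, 0.02]  # illustrative frequency values
>>> freqs.scoping = dpf.Scoping(ids=[1, 2], location="TimeFreq_steps")  # load step ids
>>> op.inputs.time_scoping.connect(freqs)
>>> fields = op.outputs.fields_container()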
diff --git a/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py
index 0e0be41756d..b57bbd63da8 100644
--- a/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py
+++ b/src/ansys/dpf/core/operators/result/electric_flux_density_Z.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_flux_density_Z(Operator):
- """Read/compute Electric flux density Z component of the vector (3rd
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute Electric flux density Z component of the vector (3rd
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh from the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute Electric flux density Z component of the vector (3rd
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Electric flux density Z component of the vector (3rd
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EFDZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricFluxDensityZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricFluxDensityZ
+ inputs:
+ An instance of InputsElectricFluxDensityZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricFluxDensityZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricFluxDensityZ
+ outputs:
+ An instance of OutputsElectricFluxDensityZ.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh from the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
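The mesh_scoping and requested_location pins documented above determine which entities the result is evaluated on. A hedged sketch, assuming a hypothetical result file "model.rst" and a few illustrative node IDs:
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_flux_density_Z()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # assumed path
>>> # restrict the output to three nodes; the scoping location selects nodes vs. elements
>>> op.inputs.mesh_scoping.connect(dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal))
>>> op.inputs.requested_location.connect(dpf.locations.nodal)
>>> fields = op.outputs.fields_container()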
diff --git a/src/ansys/dpf/core/operators/result/electric_potential.py b/src/ansys/dpf/core/operators/result/electric_potential.py
index 73a8a287924..22a808dac96 100644
--- a/src/ansys/dpf/core/operators/result/electric_potential.py
+++ b/src/ansys/dpf/core/operators/result/electric_potential.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class electric_potential(Operator):
- """Read/compute electric Potential by calling the readers defined by the
+ r"""Read/compute electric Potential by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh from the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute electric Potential by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute electric Potential by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="VOLT", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElectricPotential:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElectricPotential
+ inputs:
+ An instance of InputsElectricPotential.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElectricPotential:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElectricPotential
+ outputs:
+ An instance of OutputsElectricPotential.
"""
return super().outputs
@@ -315,28 +270,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -349,24 +291,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,15 +312,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -400,15 +333,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -421,15 +354,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -442,15 +375,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -463,15 +396,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh from the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,18 +435,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.electric_potential()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
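default_config, as typed above, returns a Config that can be adjusted before instantiating the operator. A short sketch under the same assumptions (hypothetical "model.rst"; the configuration is left at its defaults here):
>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.electric_potential.default_config()
>>> op = dpf.operators.result.electric_potential(config=config)
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # assumed path
>>> fields = op.outputs.fields_container()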
diff --git a/src/ansys/dpf/core/operators/result/element_centroids.py b/src/ansys/dpf/core/operators/result/element_centroids.py
index 7ab1504792b..f541d6b0b70 100644
--- a/src/ansys/dpf/core/operators/result/element_centroids.py
+++ b/src/ansys/dpf/core/operators/result/element_centroids.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_centroids(Operator):
- """Read/compute coordinate of the elemental centroids by calling the
+ r"""Read/compute coordinate of the elemental centroids by calling the
readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh from the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute coordinate of the elemental centroids by calling the
- readers defined by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute coordinate of the elemental centroids by calling the
+readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="centroids", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementCentroids:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementCentroids
+ inputs:
+ An instance of InputsElementCentroids.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementCentroids:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementCentroids
+ outputs:
+ An instance of OutputsElementCentroids.
"""
return super().outputs
@@ -315,28 +270,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -349,24 +291,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -379,15 +312,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -400,15 +333,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -421,15 +354,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -442,15 +375,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -463,15 +396,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,18 +435,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_centroids()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
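For orientation, here is a minimal usage sketch of the element_centroids operator documented above, in the same doctest style as its Examples sections. The result file path "model.rst" is a placeholder and not part of this changeset; any result file readable by the data sources would do.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_centroids()
>>> # Connect a hypothetical result file through a DataSources instance (pin 4).
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
>>> # Evaluate the operator and fetch the computed centroids from the output pin.
>>> fields_container = op.outputs.fields_container()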
diff --git a/src/ansys/dpf/core/operators/result/element_nodal_forces.py b/src/ansys/dpf/core/operators/result/element_nodal_forces.py
index cb3e43b5a43..40e60be1916 100644
--- a/src/ansys/dpf/core/operators/result/element_nodal_forces.py
+++ b/src/ansys/dpf/core/operators/result/element_nodal_forces.py
@@ -4,118 +4,61 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_nodal_forces(Operator):
- """Read/compute element nodal forces by calling the readers defined by
- the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element nodal forces by calling the readers defined by the
+ datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
- split_force_components : bool, optional
- If this pin is set to true, the output fields
- container splits the enf by degree of
- freedom ("dof" label, 0 for
- translation, 1 for rotation, 2 for
- temperature) and derivative order
- ("derivative_order" label, 0 for
- stiffness terms, 1 for damping terms
- and 2 for inertial terms). default is
- false.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+ split_force_components: bool, optional
+ If this pin is set to true, the output fields container splits the ENF by degree of freedom ("dof" label, 0 for translation, 1 for rotation, 2 for temperature) and derivative order ("derivative_order" label, 0 for stiffness terms, 1 for damping terms and 2 for inertial terms). Default is false.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -240,11 +183,11 @@ def __init__(
self.inputs.split_force_components.connect(split_force_components)
@staticmethod
- def _spec():
- description = """Read/compute element nodal forces by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal forces by calling the readers defined by the
+datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -259,158 +202,97 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
200: PinSpecification(
name="split_force_components",
type_names=["bool"],
optional=True,
- document="""If this pin is set to true, the output fields
- container splits the enf by degree of
- freedom ("dof" label, 0 for
- translation, 1 for rotation, 2 for
- temperature) and derivative order
- ("derivative_order" label, 0 for
- stiffness terms, 1 for damping terms
- and 2 for inertial terms). default is
- false.""",
+ document=r"""If this pin is set to true, the output fields container splits the ENF by degree of freedom ("dof" label, 0 for translation, 1 for rotation, 2 for temperature) and derivative order ("derivative_order" label, 0 for stiffness terms, 1 for damping terms and 2 for inertial terms). Default is false.""",
),
},
map_output_pin_spec={
@@ -418,14 +300,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -434,29 +316,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENF", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementNodalForces:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementNodalForces
+ inputs:
+ An instance of InputsElementNodalForces.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementNodalForces:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementNodalForces
+ outputs:
+ An instance of OutputsElementNodalForces.
"""
return super().outputs
@@ -559,28 +448,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._split_force_components)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -593,24 +469,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -623,15 +490,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -644,15 +511,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -665,15 +532,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -686,15 +553,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -707,15 +574,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -728,15 +595,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -749,18 +616,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -773,15 +637,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -794,17 +658,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -817,15 +679,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -838,15 +700,15 @@ def phi(self):
return self._phi
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -859,20 +721,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -885,21 +742,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -912,22 +763,15 @@ def shell_layer(self):
return self._shell_layer
@property
- def split_force_components(self):
- """Allows to connect split_force_components input to the operator.
-
- If this pin is set to true, the output fields
- container splits the enf by degree of
- freedom ("dof" label, 0 for
- translation, 1 for rotation, 2 for
- temperature) and derivative order
- ("derivative_order" label, 0 for
- stiffness terms, 1 for damping terms
- and 2 for inertial terms). default is
- false.
+ def split_force_components(self) -> Input:
+ r"""Allows to connect split_force_components input to the operator.
- Parameters
- ----------
- my_split_force_components : bool
+ If this pin is set to true, the output fields container splits the ENF by degree of freedom ("dof" label, 0 for translation, 1 for rotation, 2 for temperature) and derivative order ("derivative_order" label, 0 for stiffness terms, 1 for damping terms and 2 for inertial terms). Default is false.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -960,18 +804,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_nodal_forces()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
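A similar hedged sketch for element_nodal_forces, showing how the pins documented above can be driven; the file path, the set id, and the availability of a "Nodal" location depend on the actual result file and are assumptions here.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_nodal_forces()
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # placeholder path
>>> op.inputs.time_scoping.connect(1)  # a time/freq set id; pin 0 also accepts doubles, Scoping or Field
>>> op.inputs.requested_location.connect("Nodal")  # Nodal, Elemental or ElementalNodal
>>> fields_container = op.outputs.fields_container()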
diff --git a/src/ansys/dpf/core/operators/result/element_orientations.py b/src/ansys/dpf/core/operators/result/element_orientations.py
index 70bb8c5d855..67ec2b88a1b 100644
--- a/src/ansys/dpf/core/operators/result/element_orientations.py
+++ b/src/ansys/dpf/core/operators/result/element_orientations.py
@@ -4,91 +4,51 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_orientations(Operator):
- """Read/compute element euler angles by calling the readers defined by
- the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element euler angles by calling the readers defined by the
+ datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +143,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element euler angles by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element euler angles by calling the readers defined by the
+datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +162,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +230,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +246,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EUL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementOrientations:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementOrientations
+ inputs:
+ An instance of InputsElementOrientations.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementOrientations:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementOrientations
+ outputs:
+ An instance of OutputsElementOrientations.
"""
return super().outputs
@@ -427,28 +350,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -461,24 +371,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -491,15 +392,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,15 +413,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,15 +434,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,15 +455,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -575,15 +476,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +497,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,15 +518,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -638,20 +539,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -664,21 +560,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
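Reviewer note: the split_shells / shell_layer interaction documented above is easier to follow in code. A minimal doctest-style sketch follows, in the same style as the Examples sections of these docstrings; the integer shell-layer value is a placeholder (the real mapping comes from the shell_layers enum in ansys.dpf.core.common), not something defined by this patch.

>>> from ansys.dpf import core as dpf
>>> # Option 1: force elemental nodal shell and solid results to stay split.
>>> op_split = dpf.operators.result.element_orientations()
>>> op_split.inputs.split_shells.connect(True)
>>> # Option 2: keep split_shells at its default (False) and provide a specific
>>> # shell layer so shell and solid fields can be merged (0 is a placeholder;
>>> # take the real value from the shell_layers enum in ansys.dpf.core.common).
>>> op_merged = dpf.operators.result.element_orientations()
>>> op_merged.inputs.shell_layer.connect(0)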
@@ -711,18 +601,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
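For context on the re-typed pins above, here is a hedged end-to-end sketch of the element_orientations operator, written in the same doctest style as the Examples sections; the result file path is a placeholder and the pin names follow the docstrings in this patch.

>>> from ansys.dpf import core as dpf
>>> # Placeholder result file; substitute a real result file path.
>>> data_sources = dpf.DataSources(r"/tmp/model.rst")
>>> op = dpf.operators.result.element_orientations()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.requested_location.connect("Nodal")
>>> op.inputs.bool_rotate_to_global.connect(True)
>>> # Evaluating the output pin runs the operator and returns a FieldsContainer.
>>> fields_container = op.outputs.fields_container()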
diff --git a/src/ansys/dpf/core/operators/result/element_orientations_X.py b/src/ansys/dpf/core/operators/result/element_orientations_X.py
index 125e6e6b351..9a5aca690b4 100644
--- a/src/ansys/dpf/core/operators/result/element_orientations_X.py
+++ b/src/ansys/dpf/core/operators/result/element_orientations_X.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_orientations_X(Operator):
- """Read/compute element euler angles X component of the vector (1st
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element euler angles X component of the vector (1st
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element euler angles X component of the vector (1st
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element euler angles X component of the vector (1st
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EULX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementOrientationsX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementOrientationsX
+ inputs:
+ An instance of InputsElementOrientationsX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementOrientationsX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementOrientationsX
+ outputs:
+ An instance of OutputsElementOrientationsX.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
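The default_config signature for element_orientations_X now advertises a Config return type; below is a small sketch of how that object is typically reused, with no specific config option names assumed.

>>> from ansys.dpf import core as dpf
>>> # Retrieve the default configuration for the EULX operator; with
>>> # server=None the global server is used.
>>> cfg = dpf.operators.result.element_orientations_X.default_config()
>>> # Pass the (possibly modified) config when instantiating the operator.
>>> op = dpf.operators.result.element_orientations_X(config=cfg)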
diff --git a/src/ansys/dpf/core/operators/result/element_orientations_Y.py b/src/ansys/dpf/core/operators/result/element_orientations_Y.py
index b15bf817dd6..cac1e2f2194 100644
--- a/src/ansys/dpf/core/operators/result/element_orientations_Y.py
+++ b/src/ansys/dpf/core/operators/result/element_orientations_Y.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_orientations_Y(Operator):
- """Read/compute element euler angles Y component of the vector (2nd
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element euler angles Y component of the vector (2nd
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element euler angles Y component of the vector (2nd
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element euler angles Y component of the vector (2nd
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EULY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementOrientationsY:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementOrientationsY
+ inputs:
+ An instance of InputsElementOrientationsY.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementOrientationsY:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementOrientationsY
+ outputs:
+ An instance of OutputsElementOrientationsY.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations_Y()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
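To make the time_scoping / mesh_scoping / read_cyclic pin documentation above concrete, a hedged sketch for element_orientations_Y follows; the set IDs and node IDs are placeholders, and a data source must still be connected before evaluation, none of which is defined by this patch.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations_Y()
>>> # Time/freq set ids passed as ints (placeholder values).
>>> op.inputs.time_scoping.connect([1, 2])
>>> # Restrict the output to a few nodes (placeholder IDs); the scoping
>>> # location tells the operator whether nodes or elements are requested.
>>> mesh_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)
>>> op.inputs.mesh_scoping.connect(mesh_scoping)
>>> # 2 requests a cyclic expansion, per the read_cyclic pin documentation.
>>> op.inputs.read_cyclic.connect(2)
>>> # A DataSources (or streams) connection is still required before evaluating.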
diff --git a/src/ansys/dpf/core/operators/result/element_orientations_Z.py b/src/ansys/dpf/core/operators/result/element_orientations_Z.py
index 840b8e83e43..22ed070084a 100644
--- a/src/ansys/dpf/core/operators/result/element_orientations_Z.py
+++ b/src/ansys/dpf/core/operators/result/element_orientations_Z.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class element_orientations_Z(Operator):
- """Read/compute element euler angles Z component of the vector (3rd
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute element euler angles Z component of the vector (3rd
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute element euler angles Z component of the vector (3rd
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element euler angles Z component of the vector (3rd
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EULZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementOrientationsZ:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementOrientationsZ
+ inputs:
+ An instance of InputsElementOrientationsZ.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementOrientationsZ:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementOrientationsZ
+ outputs:
+ An instance of OutputsElementOrientationsZ.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations_Z()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
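For orientation, here is a minimal usage sketch of the element_orientations_Z operator documented above, written in the doctest style these modules already use; the result file path and the "Nodal" location are illustrative assumptions rather than values taken from this patch, and a running DPF server is required:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.element_orientations_Z()
>>> # hypothetical result file; replace with a real path
>>> op.inputs.data_sources.connect(dpf.DataSources(r"model.rst"))
>>> # requested_location is a string pin; "Nodal" is its documented default
>>> op.inputs.requested_location.connect("Nodal")
>>> result_fields_container = op.outputs.fields_container()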
diff --git a/src/ansys/dpf/core/operators/result/elemental_heat_generation.py b/src/ansys/dpf/core/operators/result/elemental_heat_generation.py
index 78e55e065e3..dfef170bc48 100644
--- a/src/ansys/dpf/core/operators/result/elemental_heat_generation.py
+++ b/src/ansys/dpf/core/operators/result/elemental_heat_generation.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_heat_generation(Operator):
- """Read/compute Elemental Heat Generation by calling the readers defined
- by the datasources.
+ r"""Read/compute Elemental Heat Generation by calling the readers defined by
+ the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute Elemental Heat Generation by calling the readers defined
- by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute Elemental Heat Generation by calling the readers defined by
+the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EHC", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalHeatGeneration:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalHeatGeneration
+ inputs:
+ An instance of InputsElementalHeatGeneration.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalHeatGeneration:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalHeatGeneration
+ outputs:
+ An instance of OutputsElementalHeatGeneration.
"""
return super().outputs
@@ -321,28 +276,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -355,24 +297,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -385,15 +318,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -406,15 +339,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -427,15 +360,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -448,15 +381,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,15 +402,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -510,18 +443,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elemental_heat_generation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
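As a quick illustration of the pins described above for elemental_heat_generation, a hedged sketch in the same doctest style; the file path and the time/freq set id are placeholder assumptions:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elemental_heat_generation()
>>> # hypothetical result file; data_sources is the only mandatory pin
>>> op.inputs.data_sources.connect(dpf.DataSources(r"model.rst"))
>>> # ints passed to time_scoping are interpreted as time/freq set ids
>>> op.inputs.time_scoping.connect([1])
>>> fields = op.outputs.fields_container()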
diff --git a/src/ansys/dpf/core/operators/result/elemental_mass.py b/src/ansys/dpf/core/operators/result/elemental_mass.py
index 4400e850a6d..e72069856a5 100644
--- a/src/ansys/dpf/core/operators/result/elemental_mass.py
+++ b/src/ansys/dpf/core/operators/result/elemental_mass.py
@@ -4,66 +4,42 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_mass(Operator):
- """Read/compute element mass by calling the readers defined by the
+ r"""Read/compute element mass by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -134,9 +110,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Read/compute element mass by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element mass by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -151,72 +128,43 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
},
map_output_pin_spec={
@@ -224,14 +172,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -240,29 +188,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ElementalMass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalMass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalMass
+ inputs:
+ An instance of InputsElementalMass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalMass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalMass
+ outputs:
+ An instance of OutputsElementalMass.
"""
return super().outputs
@@ -311,28 +266,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -345,24 +287,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -375,15 +308,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -396,15 +329,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -417,15 +350,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -438,15 +371,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -459,15 +392,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -498,18 +431,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elemental_mass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
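The default_config changes above also apply to elemental_mass; a possible way to exercise them, sketched under the assumption that the generated constructor accepts config and server keywords as the other operators in this package do:

>>> from ansys.dpf import core as dpf
>>> # obtain the default configuration and pass it back when instantiating the operator
>>> config = dpf.operators.result.elemental_mass.default_config()
>>> op = dpf.operators.result.elemental_mass(config=config)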
diff --git a/src/ansys/dpf/core/operators/result/elemental_volume.py b/src/ansys/dpf/core/operators/result/elemental_volume.py
index 8f1dfe912b9..7666a646368 100644
--- a/src/ansys/dpf/core/operators/result/elemental_volume.py
+++ b/src/ansys/dpf/core/operators/result/elemental_volume.py
@@ -4,83 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class elemental_volume(Operator):
- """Read/compute element volume by calling the readers defined by the
+ r"""Read/compute element volume by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -175,9 +142,10 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """Read/compute element volume by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element volume by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -192,105 +160,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
},
map_output_pin_spec={
@@ -298,14 +228,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -314,29 +244,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENG_VOL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsElementalVolume:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsElementalVolume
+ inputs:
+ An instance of InputsElementalVolume.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsElementalVolume:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsElementalVolume
+ outputs:
+ An instance of OutputsElementalVolume.
"""
return super().outputs
@@ -407,28 +344,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -441,24 +365,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -471,15 +386,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -492,15 +407,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -513,15 +428,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -534,15 +449,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -555,15 +470,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -576,18 +491,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -600,15 +512,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -621,17 +533,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -644,15 +554,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -683,18 +593,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elemental_volume()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
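Editor's note: the hunks above only rewrite the docstrings of the autogenerated `elemental_volume` wrapper; behaviour is unchanged. A minimal usage sketch of the documented pattern, assuming the operator exposes the standard `data_sources` pin used by the other result operators in this diff (the file path is a placeholder, not part of the change):

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.elemental_volume()
>>> # Placeholder result file; any supported result file path works here.
>>> op.inputs.data_sources.connect(dpf.DataSources("model.rst"))
>>> # Requesting an output pin evaluates the operator.
>>> volumes = op.outputs.fields_container()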
diff --git a/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py b/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py
index 1936d0b7d88..214edca69e0 100644
--- a/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py
+++ b/src/ansys/dpf/core/operators/result/enf_rotation_by_euler_nodes.py
@@ -4,27 +4,32 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class enf_rotation_by_euler_nodes(Operator):
- """read Euler angles on elements from the result file and rotate the
- fields in the fieldsContainer.
+ r"""read Euler angles on elements from the result file and rotate the fields
+ in the fieldsContainer.
+
Parameters
----------
- fields_container : FieldsContainer, optional
- streams_container : StreamsContainer or Stream or Class
+ fields_container: FieldsContainer, optional
+ streams_container: StreamsContainer or Stream or Class
Dataprocessing::Crstfilewrapper, optional
- data_sources : DataSources
+ data_sources: DataSources
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -73,9 +78,10 @@ def __init__(
self.inputs.data_sources.connect(data_sources)
@staticmethod
- def _spec():
- description = """read Euler angles on elements from the result file and rotate the
- fields in the fieldsContainer."""
+ def _spec() -> Specification:
+ description = r"""Read Euler angles on elements from the result file and rotate the fields
+in the fieldsContainer.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -83,7 +89,7 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""""",
+ document=r"""""",
),
3: PinSpecification(
name="streams_container",
@@ -93,13 +99,13 @@ def _spec():
"class dataProcessing::CRstFileWrapper",
],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -107,14 +113,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -123,31 +129,38 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(
name="ENF_rotation_by_euler_nodes", server=server
)
@property
- def inputs(self):
+ def inputs(self) -> InputsEnfRotationByEulerNodes:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEnfRotationByEulerNodes
+ inputs:
+ An instance of InputsEnfRotationByEulerNodes.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEnfRotationByEulerNodes:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEnfRotationByEulerNodes
+ outputs:
+ An instance of OutputsEnfRotationByEulerNodes.
"""
return super().outputs
@@ -184,12 +197,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._data_sources)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -202,13 +216,13 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream or Class
- Dataprocessing::Crstfilewrapper
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -221,12 +235,13 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -259,18 +274,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.enf_rotation_by_euler_nodes()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
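Editor's note: the class docstring above now lists `fields_container`, `streams_container` and `data_sources` as constructor keyword arguments, which the `__init__` connects to the corresponding pins. A hedged sketch of the simplest call, letting the operator read the ENF fields itself (the file path is a placeholder):

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("model.rst")  # placeholder path to a result file
>>> op = dpf.operators.result.enf_rotation_by_euler_nodes(data_sources=ds)
>>> rotated = op.outputs.fields_container()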
diff --git a/src/ansys/dpf/core/operators/result/enthalpy.py b/src/ansys/dpf/core/operators/result/enthalpy.py
index c1c2259dfe3..b798fcc069b 100644
--- a/src/ansys/dpf/core/operators/result/enthalpy.py
+++ b/src/ansys/dpf/core/operators/result/enthalpy.py
@@ -4,73 +4,43 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class enthalpy(Operator):
- """Read Enthalpy by calling the readers defined by the datasources.
+ r"""Read Enthalpy by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -147,10 +117,9 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = (
- """Read Enthalpy by calling the readers defined by the datasources."""
- )
+ def _spec() -> Specification:
+ description = r"""Read Enthalpy by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +134,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +184,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +200,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="H_S", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEnthalpy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEnthalpy
+ inputs:
+ An instance of InputsEnthalpy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEnthalpy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEnthalpy
+ outputs:
+ An instance of OutputsEnthalpy.
"""
return super().outputs
@@ -339,28 +280,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,24 +301,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,15 +322,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,15 +343,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +364,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,18 +385,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,16 +406,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,16 +427,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -552,18 +466,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.enthalpy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
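Editor's note: a short pin-by-pin sketch for the `enthalpy` operator documented above, using the `data_sources` and `time_scoping` pins from this diff. The file name is a placeholder (fluid setups may need additional files added to the DataSources), and `1` is an assumed time/freq set id:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.enthalpy()
>>> op.inputs.data_sources.connect(dpf.DataSources("fluid_model.cas.h5"))  # placeholder path
>>> op.inputs.time_scoping.connect(1)  # a single time/freq set id
>>> enthalpy_fc = op.outputs.fields_container()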
diff --git a/src/ansys/dpf/core/operators/result/entropy.py b/src/ansys/dpf/core/operators/result/entropy.py
index 874e9cce1e0..be92a97d5d1 100644
--- a/src/ansys/dpf/core/operators/result/entropy.py
+++ b/src/ansys/dpf/core/operators/result/entropy.py
@@ -4,73 +4,43 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class entropy(Operator):
- """Read Entropy by calling the readers defined by the datasources.
+ r"""Read Entropy by calling the readers defined by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -147,10 +117,9 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = (
- """Read Entropy by calling the readers defined by the datasources."""
- )
+ def _spec() -> Specification:
+ description = r"""Read Entropy by calling the readers defined by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +134,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +184,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +200,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="S_S", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEntropy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEntropy
+ inputs:
+ An instance of InputsEntropy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEntropy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEntropy
+ outputs:
+ An instance of OutputsEntropy.
"""
return super().outputs
@@ -339,28 +280,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,24 +301,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,15 +322,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,15 +343,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +364,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,18 +385,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,16 +406,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,16 +427,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -552,18 +466,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.entropy()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
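Editor's note: for `entropy`, the `mesh_scoping` pin documented above restricts the output to given node or element ids. A hedged sketch using the keyword-argument constructor; the file path and node ids are placeholders:

>>> from ansys.dpf import core as dpf
>>> ds = dpf.DataSources("fluid_model.cas.h5")  # placeholder path
>>> scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)  # placeholder node ids
>>> op = dpf.operators.result.entropy(data_sources=ds, mesh_scoping=scoping)
>>> entropy_fc = op.outputs.fields_container()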
diff --git a/src/ansys/dpf/core/operators/result/epsilon.py b/src/ansys/dpf/core/operators/result/epsilon.py
index 32e2768975a..1230f71127a 100644
--- a/src/ansys/dpf/core/operators/result/epsilon.py
+++ b/src/ansys/dpf/core/operators/result/epsilon.py
@@ -4,74 +4,44 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class epsilon(Operator):
- """Read Turbulent Dissipation Rate (epsilon) by calling the readers
- defined by the datasources.
+ r"""Read Turbulent Dissipation Rate (epsilon) by calling the readers defined
+ by the datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- region_scoping : Scoping or int, optional
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
- qualifiers1 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
- qualifiers2 : dict, optional
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ region_scoping: Scoping or int, optional
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
+ qualifiers1: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
+ qualifiers2: dict, optional
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -148,9 +118,10 @@ def __init__(
self.inputs.qualifiers2.connect(qualifiers2)
@staticmethod
- def _spec():
- description = """Read Turbulent Dissipation Rate (epsilon) by calling the readers
- defined by the datasources."""
+ def _spec() -> Specification:
+ description = r"""Read Turbulent Dissipation Rate (epsilon) by calling the readers defined
+by the datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -165,84 +136,49 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
25: PinSpecification(
name="region_scoping",
type_names=["scoping", "int32", "vector"],
optional=True,
- document="""Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).""",
+ document=r"""region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).""",
),
1000: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
1001: PinSpecification(
name="qualifiers",
type_names=["label_space"],
optional=True,
- document="""(for fluid results only) labelspace with
- combination of zone, phases or
- species ids""",
+ document=r"""(for Fluid results only) LabelSpace with combination of zone, phases or species ids""",
),
},
map_output_pin_spec={
@@ -250,14 +186,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -266,29 +202,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="EPS", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEpsilon:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEpsilon
+ inputs:
+ An instance of InputsEpsilon.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEpsilon:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEpsilon
+ outputs:
+ An instance of OutputsEpsilon.
"""
return super().outputs
@@ -339,28 +282,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._qualifiers2)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -373,24 +303,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,15 +324,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -424,15 +345,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -445,15 +366,15 @@ def data_sources(self):
return self._data_sources
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -466,18 +387,15 @@ def mesh(self):
return self._mesh
@property
- def region_scoping(self):
- """Allows to connect region_scoping input to the operator.
+ def region_scoping(self) -> Input:
+ r"""Allows to connect region_scoping input to the operator.
- Region id (integer) or vector of region ids
- (vector) or region scoping (scoping)
- of the model (region corresponds to
- zone for fluid results or part for
- lsdyna results).
+ region id (integer) or vector of region ids (vector) or region scoping (scoping) of the model (region corresponds to zone for Fluid results or part for LSDyna results).
- Parameters
- ----------
- my_region_scoping : Scoping or int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -490,16 +408,15 @@ def region_scoping(self):
return self._region_scoping
@property
- def qualifiers1(self):
- """Allows to connect qualifiers1 input to the operator.
+ def qualifiers1(self) -> Input:
+ r"""Allows to connect qualifiers1 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers1 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,16 +429,15 @@ def qualifiers1(self):
return self._qualifiers1
@property
- def qualifiers2(self):
- """Allows to connect qualifiers2 input to the operator.
+ def qualifiers2(self) -> Input:
+ r"""Allows to connect qualifiers2 input to the operator.
- (for fluid results only) labelspace with
- combination of zone, phases or
- species ids
+ (for Fluid results only) LabelSpace with combination of zone, phases or species ids
- Parameters
- ----------
- my_qualifiers2 : dict
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -552,18 +468,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.epsilon()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
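For reference, a minimal usage sketch consistent with the updated epsilon docstrings above; the result file path is a placeholder (not part of this patch) and only the data_sources pin is connected here:

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources("path/to/results.rst")  # placeholder path
>>> op = dpf.operators.result.epsilon()
>>> op.inputs.data_sources.connect(data_sources)
>>> result_fields_container = op.outputs.fields_container()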
diff --git a/src/ansys/dpf/core/operators/result/equivalent_mass.py b/src/ansys/dpf/core/operators/result/equivalent_mass.py
index 0e2e64b392b..a715de2a467 100644
--- a/src/ansys/dpf/core/operators/result/equivalent_mass.py
+++ b/src/ansys/dpf/core/operators/result/equivalent_mass.py
@@ -4,82 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class equivalent_mass(Operator):
- """Read/compute equivalent dof mass by calling the readers defined by the
+ r"""Read/compute equivalent dof mass by calling the readers defined by the
datasources.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- all_dofs : bool, optional
- Default is false.
- mesh : MeshedRegion or MeshesContainer, optional
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- expanded_meshed_region : MeshedRegion or MeshesContainer, optional
- Mesh expanded, use if cyclic expansion is to
- be done.
- sectors_to_expand : Scoping or ScopingsContainer, optional
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
- phi : float, optional
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ all_dofs: bool, optional
+ default is false.
+ mesh: MeshedRegion or MeshesContainer, optional
+ mesh. If cyclic expansion is to be done, mesh of the base sector
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ expanded_meshed_region: MeshedRegion or MeshesContainer, optional
+ mesh expanded, use if cyclic expansion is to be done.
+ sectors_to_expand: Scoping or ScopingsContainer, optional
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
+ phi: float, optional
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -174,9 +142,10 @@ def __init__(
self.inputs.phi.connect(phi)
@staticmethod
- def _spec():
- description = """Read/compute equivalent dof mass by calling the readers defined by the
- datasources."""
+ def _spec() -> Specification:
+ description = r"""Read/compute equivalent dof mass by calling the readers defined by the
+datasources.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -191,104 +160,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
6: PinSpecification(
name="all_dofs",
type_names=["bool"],
optional=True,
- document="""Default is false.""",
+ document=r"""default is false.""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh. if cylic expansion is to be done, mesh
- of the base sector""",
+ document=r"""mesh. If cylic expansion is to be done, mesh of the base sector""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
15: PinSpecification(
name="expanded_meshed_region",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Mesh expanded, use if cyclic expansion is to
- be done.""",
+ document=r"""mesh expanded, use if cyclic expansion is to be done.""",
),
18: PinSpecification(
name="sectors_to_expand",
type_names=["vector", "scoping", "scopings_container"],
optional=True,
- document="""Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.""",
+ document=r"""sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.""",
),
19: PinSpecification(
name="phi",
type_names=["double"],
optional=True,
- document="""Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.""",
+ document=r"""angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.""",
),
},
map_output_pin_spec={
@@ -296,14 +228,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -312,29 +244,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="equivalent_mass", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEquivalentMass:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEquivalentMass
+ inputs:
+ An instance of InputsEquivalentMass.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEquivalentMass:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEquivalentMass
+ outputs:
+ An instance of OutputsEquivalentMass.
"""
return super().outputs
@@ -401,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._phi)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -435,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -465,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -486,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -507,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -528,14 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def all_dofs(self):
- """Allows to connect all_dofs input to the operator.
+ def all_dofs(self) -> Input:
+ r"""Allows to connect all_dofs input to the operator.
- Default is false.
+ default is false.
- Parameters
- ----------
- my_all_dofs : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -548,15 +466,15 @@ def all_dofs(self):
return self._all_dofs
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Mesh. if cylic expansion is to be done, mesh
- of the base sector
+ mesh. If cyclic expansion is to be done, mesh of the base sector
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -569,18 +487,15 @@ def mesh(self):
return self._mesh
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -593,15 +508,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def expanded_meshed_region(self):
- """Allows to connect expanded_meshed_region input to the operator.
+ def expanded_meshed_region(self) -> Input:
+ r"""Allows to connect expanded_meshed_region input to the operator.
- Mesh expanded, use if cyclic expansion is to
- be done.
+ mesh expanded, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_expanded_meshed_region : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -614,17 +529,15 @@ def expanded_meshed_region(self):
return self._expanded_meshed_region
@property
- def sectors_to_expand(self):
- """Allows to connect sectors_to_expand input to the operator.
+ def sectors_to_expand(self) -> Input:
+ r"""Allows to connect sectors_to_expand input to the operator.
- Sectors to expand (start at 0), for
- multistage: use scopings container
- with 'stage' label, use if cyclic
- expansion is to be done.
+ sectors to expand (start at 0), for multistage: use scopings container with 'stage' label, use if cyclic expansion is to be done.
- Parameters
- ----------
- my_sectors_to_expand : Scoping or ScopingsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -637,15 +550,15 @@ def sectors_to_expand(self):
return self._sectors_to_expand
@property
- def phi(self):
- """Allows to connect phi input to the operator.
+ def phi(self) -> Input:
+ r"""Allows to connect phi input to the operator.
- Angle phi in degrees (default value 0.0), use
- if cyclic expansion is to be done.
+ angle phi in degrees (default value 0.0), use if cyclic expansion is to be done.
- Parameters
- ----------
- my_phi : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -676,18 +589,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.equivalent_mass()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
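A hedged sketch of driving the equivalent_mass operator documented above; the path is a placeholder and the all_dofs value is illustrative only:

>>> from ansys.dpf import core as dpf
>>> config = dpf.operators.result.equivalent_mass.default_config()  # returns a Config instance, per the new annotation
>>> op = dpf.operators.result.equivalent_mass(config=config)
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/results.rst"))  # placeholder path
>>> op.inputs.all_dofs.connect(False)  # optional pin, default is false
>>> result_fields_container = op.outputs.fields_container()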
diff --git a/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py b/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py
index 9bcf736e5c6..3455556a362 100644
--- a/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py
+++ b/src/ansys/dpf/core/operators/result/equivalent_radiated_power.py
@@ -4,51 +4,43 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class equivalent_radiated_power(Operator):
- """Compute the Equivalent Radiated Power (ERP)
+ r"""Compute the Equivalent Radiated Power (ERP)
+
Parameters
----------
- fields_container : FieldsContainer
- The input field container expects
- displacements fields
- mesh : MeshedRegion or MeshesContainer
- The mesh region in this pin has to be
- boundary or skin mesh
- time_scoping : int or Scoping
- Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping
- mass_density : float
- Mass density (if it's not specified, default
- value of the air is applied).
- speed_of_sound : float
- Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).
- erp_type : int
- If this pin is set to 0, the classical erp is
- computed, 1 the corrected erp is
- computed (a mesh of one face has to
- be given in the pin 1) and 2 the
- enhanced erp is computed. default is
- 0.
- boolean : bool
- If this pin is set to true, the erp level in
- db is computed
- factor : float
- Erp reference value. default is 1e-12
+ fields_container: FieldsContainer
+ the input field container expects displacements fields
+ mesh: MeshedRegion or MeshesContainer
+ the mesh region in this pin has to be boundary or skin mesh
+ time_scoping: int or Scoping
+ load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping
+ mass_density: float
+ mass density (if it's not specified, default value of the air is applied).
+ speed_of_sound: float
+ speed of sound (if it's not specified, default value of the speed of sound in the air is applied).
+ erp_type: int
+ if this pin is set to 0, the classical ERP is computed, 1 the corrected ERP is computed (a mesh of one face has to be given in the pin 1) and 2 the enhanced ERP is computed. Default is 0.
+ boolean: bool
+ if this pin is set to true, the ERP level in dB is computed
+ factor: float
+ erp reference value. Default is 1E-12
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -125,8 +117,9 @@ def __init__(
self.inputs.factor.connect(factor)
@staticmethod
- def _spec():
- description = """Compute the Equivalent Radiated Power (ERP)"""
+ def _spec() -> Specification:
+ description = r"""Compute the Equivalent Radiated Power (ERP)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -134,62 +127,49 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The input field container expects
- displacements fields""",
+ document=r"""the input field container expects displacements fields""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=False,
- document="""The mesh region in this pin has to be
- boundary or skin mesh""",
+ document=r"""the mesh region in this pin has to be boundary or skin mesh""",
),
2: PinSpecification(
name="time_scoping",
type_names=["int32", "vector", "scoping"],
optional=False,
- document="""Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping""",
+ document=r"""load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping""",
),
3: PinSpecification(
name="mass_density",
type_names=["double"],
optional=False,
- document="""Mass density (if it's not specified, default
- value of the air is applied).""",
+ document=r"""mass density (if it's not specified, default value of the air is applied).""",
),
4: PinSpecification(
name="speed_of_sound",
type_names=["double"],
optional=False,
- document="""Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).""",
+ document=r"""speed of sound (if it's not specified, default value of the speed of sound in the air is applied).""",
),
5: PinSpecification(
name="erp_type",
type_names=["int32"],
optional=False,
- document="""If this pin is set to 0, the classical erp is
- computed, 1 the corrected erp is
- computed (a mesh of one face has to
- be given in the pin 1) and 2 the
- enhanced erp is computed. default is
- 0.""",
+ document=r"""if this pin is set to 0, the classical ERP is computed, 1 the corrected ERP is computed (a mesh of one face has to be given in the pin 1) and 2 the enhanced ERP is computed. Default is 0.""",
),
6: PinSpecification(
name="boolean",
type_names=["bool"],
optional=False,
- document="""If this pin is set to true, the erp level in
- db is computed""",
+ document=r"""if this pin is set to true, the ERP level in dB is computed""",
),
7: PinSpecification(
name="factor",
type_names=["double"],
optional=False,
- document="""Erp reference value. default is 1e-12""",
+ document=r"""erp reference value. Default is 1E-12""",
),
},
map_output_pin_spec={
@@ -197,14 +177,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -213,29 +193,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ERP", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEquivalentRadiatedPower:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEquivalentRadiatedPower
+ inputs:
+ An instance of InputsEquivalentRadiatedPower.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEquivalentRadiatedPower:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEquivalentRadiatedPower
+ outputs:
+ An instance of OutputsEquivalentRadiatedPower.
"""
return super().outputs
@@ -296,15 +283,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._factor)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- The input field container expects
- displacements fields
+ the input field container expects displacements fields
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -317,15 +304,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The mesh region in this pin has to be
- boundary or skin mesh
+ the mesh region in this pin has to be boundary or skin mesh
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -338,16 +325,15 @@ def mesh(self):
return self._mesh
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping
+ load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping
- Parameters
- ----------
- my_time_scoping : int or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -360,15 +346,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mass_density(self):
- """Allows to connect mass_density input to the operator.
+ def mass_density(self) -> Input:
+ r"""Allows to connect mass_density input to the operator.
- Mass density (if it's not specified, default
- value of the air is applied).
+ mass density (if it's not specified, default value of the air is applied).
- Parameters
- ----------
- my_mass_density : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -381,16 +367,15 @@ def mass_density(self):
return self._mass_density
@property
- def speed_of_sound(self):
- """Allows to connect speed_of_sound input to the operator.
+ def speed_of_sound(self) -> Input:
+ r"""Allows to connect speed_of_sound input to the operator.
- Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).
+ speed of sound (if it's not specified, default value of the speed of sound in the air is applied).
- Parameters
- ----------
- my_speed_of_sound : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -403,19 +388,15 @@ def speed_of_sound(self):
return self._speed_of_sound
@property
- def erp_type(self):
- """Allows to connect erp_type input to the operator.
+ def erp_type(self) -> Input:
+ r"""Allows to connect erp_type input to the operator.
- If this pin is set to 0, the classical erp is
- computed, 1 the corrected erp is
- computed (a mesh of one face has to
- be given in the pin 1) and 2 the
- enhanced erp is computed. default is
- 0.
+ if this pin is set to 0, the classical ERP is computed, 1 the corrected ERP is computed (a mesh of one face has to be given in the pin 1) and 2 the enhanced ERP is computed. Default is 0.
- Parameters
- ----------
- my_erp_type : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -428,15 +409,15 @@ def erp_type(self):
return self._erp_type
@property
- def boolean(self):
- """Allows to connect boolean input to the operator.
+ def boolean(self) -> Input:
+ r"""Allows to connect boolean input to the operator.
- If this pin is set to true, the erp level in
- db is computed
+ if this pin is set to true, the ERP level in dB is computed
- Parameters
- ----------
- my_boolean : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -449,14 +430,15 @@ def boolean(self):
return self._boolean
@property
- def factor(self):
- """Allows to connect factor input to the operator.
+ def factor(self) -> Input:
+ r"""Allows to connect factor input to the operator.
- Erp reference value. default is 1e-12
+ erp reference value. Default is 1E-12
- Parameters
- ----------
- my_factor : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -489,18 +471,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.equivalent_radiated_power()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
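An illustrative sketch of connecting the ERP pins documented above; displacement_fields and skin_mesh are assumed to exist already, and the numeric values are placeholders rather than recommended settings:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.equivalent_radiated_power()
>>> op.inputs.fields_container.connect(displacement_fields)  # FieldsContainer of displacement fields
>>> op.inputs.mesh.connect(skin_mesh)  # boundary or skin mesh
>>> op.inputs.time_scoping.connect(1)  # load step number
>>> op.inputs.mass_density.connect(1.225)
>>> op.inputs.speed_of_sound.connect(340.0)
>>> op.inputs.erp_type.connect(0)  # classical ERP
>>> op.inputs.boolean.connect(False)  # do not compute the ERP level in dB
>>> op.inputs.factor.connect(1e-12)  # ERP reference value
>>> result_fields_container = op.outputs.fields_container()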
diff --git a/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py b/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py
index 9eb6c55c73e..cd7e7c6af24 100644
--- a/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py
+++ b/src/ansys/dpf/core/operators/result/eqv_stress_parameter.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class eqv_stress_parameter(Operator):
- """Read/compute element nodal equivalent stress parameter by calling the
- readers defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location can be
+ r"""Read/compute element nodal equivalent stress parameter by calling the
+ readers defined by the datasources. Regarding the requested location and
+ the input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents from reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute element nodal equivalent stress parameter by calling the
- readers defined by the datasources. Regarding the
- requested location and the input mesh scoping, the result
- location can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute element nodal equivalent stress parameter by calling the
+readers defined by the datasources. Regarding the requested location and
+the input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="ENL_SEPL", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEqvStressParameter:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEqvStressParameter
+ inputs:
+ An instance of InputsEqvStressParameter.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEqvStressParameter:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEqvStressParameter
+ outputs:
+ An instance of OutputsEqvStressParameter.
"""
return super().outputs
@@ -427,28 +352,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -461,24 +373,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -491,15 +394,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -512,15 +415,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -533,15 +436,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -554,15 +457,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -575,15 +478,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents from reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -596,15 +499,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -617,15 +520,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -638,20 +541,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -664,21 +562,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -711,18 +603,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.eqv_stress_parameter()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
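A minimal sketch for the eqv_stress_parameter operator (DPF name ENL_SEPL) using the pins documented above; the path and the requested location value are placeholders:

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.eqv_stress_parameter()
>>> op.inputs.data_sources.connect(dpf.DataSources("path/to/results.rst"))  # placeholder path
>>> op.inputs.requested_location.connect("Nodal")  # or "Elemental" / "ElementalNodal"
>>> result_fields_container = op.outputs.fields_container()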
diff --git a/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py b/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py
index 9827124d6d7..774d479efc8 100644
--- a/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py
+++ b/src/ansys/dpf/core/operators/result/erp_radiation_efficiency.py
@@ -4,40 +4,37 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class erp_radiation_efficiency(Operator):
- """Compute the radiation efficiency (enhanced erp divided by classical
- erp)
+ r"""Compute the radiation efficiency (enhanced erp divided by classical erp)
+
Parameters
----------
- fields_container : FieldsContainer
- The input field container expects
- displacements fields
- mesh : MeshedRegion or MeshesContainer
- The meshes region in this pin has to be
- boundary or skin mesh
- time_scoping : int or Scoping
- Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping
- mass_density : float
- Mass density (if it's not specified, default
- value of the air is applied).
- speed_of_sound : float
- Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).
+ fields_container: FieldsContainer
+        the input field container expects displacement fields
+ mesh: MeshedRegion or MeshesContainer
+ the meshes region in this pin has to be boundary or skin mesh
+ time_scoping: int or Scoping
+ load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping
+ mass_density: float
+ mass density (if it's not specified, default value of the air is applied).
+ speed_of_sound: float
+ speed of sound (if it's not specified, default value of the speed of sound in the air is applied).
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -96,9 +93,9 @@ def __init__(
self.inputs.speed_of_sound.connect(speed_of_sound)
@staticmethod
- def _spec():
- description = """Compute the radiation efficiency (enhanced erp divided by classical
- erp)"""
+ def _spec() -> Specification:
+ description = r"""Compute the radiation efficiency (enhanced erp divided by classical erp)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -106,38 +103,31 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""The input field container expects
- displacements fields""",
+ document=r"""the input field container expects displacements fields""",
),
1: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=False,
- document="""The meshes region in this pin has to be
- boundary or skin mesh""",
+ document=r"""the meshes region in this pin has to be boundary or skin mesh""",
),
2: PinSpecification(
name="time_scoping",
type_names=["int32", "vector", "scoping"],
optional=False,
- document="""Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping""",
+ document=r"""load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping""",
),
3: PinSpecification(
name="mass_density",
type_names=["double"],
optional=False,
- document="""Mass density (if it's not specified, default
- value of the air is applied).""",
+ document=r"""mass density (if it's not specified, default value of the air is applied).""",
),
4: PinSpecification(
name="speed_of_sound",
type_names=["double"],
optional=False,
- document="""Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).""",
+ document=r"""speed of sound (if it's not specified, default value of the speed of sound in the air is applied).""",
),
},
map_output_pin_spec={
@@ -145,14 +135,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -161,29 +151,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="erp_radiation_efficiency", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsErpRadiationEfficiency:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsErpRadiationEfficiency
+ inputs:
+ An instance of InputsErpRadiationEfficiency.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsErpRadiationEfficiency:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsErpRadiationEfficiency
+ outputs:
+ An instance of OutputsErpRadiationEfficiency.
"""
return super().outputs
@@ -230,15 +227,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._speed_of_sound)
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- The input field container expects
- displacements fields
+        the input field container expects displacement fields
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,15 +248,15 @@ def fields_container(self):
return self._fields_container
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- The meshes region in this pin has to be
- boundary or skin mesh
+ the meshes region in this pin has to be boundary or skin mesh
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -272,16 +269,15 @@ def mesh(self):
return self._mesh
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Load step number (if it's specified, the erp
- is computed only on the substeps of
- this step) or time scoping
+ load step number (if it's specified, the ERP is computed only on the substeps of this step) or time scoping
- Parameters
- ----------
- my_time_scoping : int or Scoping
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -294,15 +290,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mass_density(self):
- """Allows to connect mass_density input to the operator.
+ def mass_density(self) -> Input:
+ r"""Allows to connect mass_density input to the operator.
- Mass density (if it's not specified, default
- value of the air is applied).
+ mass density (if it's not specified, default value of the air is applied).
- Parameters
- ----------
- my_mass_density : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -315,16 +311,15 @@ def mass_density(self):
return self._mass_density
@property
- def speed_of_sound(self):
- """Allows to connect speed_of_sound input to the operator.
+ def speed_of_sound(self) -> Input:
+ r"""Allows to connect speed_of_sound input to the operator.
- Speed of sound (if it's not specified,
- default value of the speed of sound
- in the air is applied).
+ speed of sound (if it's not specified, default value of the speed of sound in the air is applied).
- Parameters
- ----------
- my_speed_of_sound : float
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -357,18 +352,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.erp_radiation_efficiency()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
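To make the erp_radiation_efficiency pins above concrete, a short hedged sketch in the same doctest style; displacement_fields and skin_mesh are placeholder variables, and the density and speed-of-sound values are illustrative overrides of the documented air defaults (pins 3 and 4 may simply be left unconnected).

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.erp_radiation_efficiency()
>>> op.inputs.fields_container.connect(displacement_fields)  # pin 0: FieldsContainer of displacement fields (placeholder)
>>> op.inputs.mesh.connect(skin_mesh)  # pin 1: boundary or skin mesh (placeholder)
>>> op.inputs.time_scoping.connect(1)  # pin 2: compute the ERP only on the substeps of load step 1
>>> op.inputs.mass_density.connect(1.225)  # pin 3: illustrative mass density
>>> op.inputs.speed_of_sound.connect(340.0)  # pin 4: illustrative speed of sound
>>> radiation_efficiency = op.outputs.fields_container()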
diff --git a/src/ansys/dpf/core/operators/result/euler_load_buckling.py b/src/ansys/dpf/core/operators/result/euler_load_buckling.py
index a4325e3b7cf..27e4bb3a779 100644
--- a/src/ansys/dpf/core/operators/result/euler_load_buckling.py
+++ b/src/ansys/dpf/core/operators/result/euler_load_buckling.py
@@ -4,44 +4,41 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class euler_load_buckling(Operator):
- """Computing Euler's Critical Load. Formula: Ncr = n*E*I*pi*pi /(L*L)
+ r"""Computing Euler’s Critical Load. Formula: Ncr = n\ *E*\ I\ *pi*\ pi
+ /(L*L)
+
Parameters
----------
- field_beam_end_condition : DataSources or Field
- This pin contains file csv or field of beam's
- end condition added by the user. if
- there's no file added, it would take
- value of all beam's end condition as
- 1.
- field_beam_moment_inertia : Field
+ field_beam_end_condition: DataSources or Field
+        This pin contains a CSV file or field of the beam's end condition added by the user. If no file is added, the end condition of all beams is taken as 1.
+ field_beam_moment_inertia: Field
        Field of beam's moment of inertia
- field_beam_young_modulus : Field
+ field_beam_young_modulus: Field
        Field of beam's Young's modulus
- field_beam_length : Field
+ field_beam_length: Field
Field of beam's length
Returns
-------
- field_euler_critical_load : Field
- This field contains euler's critical load
- about the principle axis of the cross
- section having the least moment of
- inertia.
- field_euler_critical_load_yy : Field
- This field contains euler's critical load on
- axis y.
- field_euler_critical_load_zz : Field
- This field contains euler's critical load on
- axis z.
+ field_euler_critical_load: Field
+        This field contains Euler's Critical Load about the principal axis of the cross section having the least moment of inertia.
+ field_euler_critical_load_yy: Field
+ This field contains Euler's Critical Load on axis y.
+ field_euler_critical_load_zz: Field
+ This field contains Euler's Critical Load on axis z.
Examples
--------
@@ -96,10 +93,10 @@ def __init__(
self.inputs.field_beam_length.connect(field_beam_length)
@staticmethod
- def _spec():
- description = (
- """Computing Euler's Critical Load. Formula: Ncr = n*E*I*pi*pi /(L*L)"""
- )
+ def _spec() -> Specification:
+ description = r"""Computing Euler’s Critical Load. Formula: Ncr = n\ *E*\ I\ *pi*\ pi
+/(L*L)
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -107,29 +104,25 @@ def _spec():
name="field_beam_end_condition",
type_names=["data_sources", "field"],
optional=False,
- document="""This pin contains file csv or field of beam's
- end condition added by the user. if
- there's no file added, it would take
- value of all beam's end condition as
- 1.""",
+ document=r"""This pin contains file csv or field of beam's end condition added by the user. If there's no file added, it would take value of all beam's end condition as 1.""",
),
6: PinSpecification(
name="field_beam_moment_inertia",
type_names=["field"],
optional=False,
- document="""Field of beam's moment inertia""",
+ document=r"""Field of beam's moment inertia""",
),
7: PinSpecification(
name="field_beam_young_modulus",
type_names=["field"],
optional=False,
- document="""Field of beam's young modulus""",
+ document=r"""Field of beam's young modulus""",
),
8: PinSpecification(
name="field_beam_length",
type_names=["field"],
optional=False,
- document="""Field of beam's length""",
+ document=r"""Field of beam's length""",
),
},
map_output_pin_spec={
@@ -137,31 +130,26 @@ def _spec():
name="field_euler_critical_load",
type_names=["field"],
optional=False,
- document="""This field contains euler's critical load
- about the principle axis of the cross
- section having the least moment of
- inertia.""",
+ document=r"""This field contains Euler's Critical Load about the principle axis of the cross section having the least moment of inertia.""",
),
1: PinSpecification(
name="field_euler_critical_load_yy",
type_names=["field"],
optional=False,
- document="""This field contains euler's critical load on
- axis y.""",
+ document=r"""This field contains Euler's Critical Load on axis y.""",
),
2: PinSpecification(
name="field_euler_critical_load_zz",
type_names=["field"],
optional=False,
- document="""This field contains euler's critical load on
- axis z.""",
+ document=r"""This field contains Euler's Critical Load on axis z.""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -170,29 +158,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="euler_load_buckling", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEulerLoadBuckling:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEulerLoadBuckling
+ inputs:
+ An instance of InputsEulerLoadBuckling.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEulerLoadBuckling:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEulerLoadBuckling
+ outputs:
+ An instance of OutputsEulerLoadBuckling.
"""
return super().outputs
@@ -235,18 +230,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._field_beam_length)
@property
- def field_beam_end_condition(self):
- """Allows to connect field_beam_end_condition input to the operator.
+ def field_beam_end_condition(self) -> Input:
+ r"""Allows to connect field_beam_end_condition input to the operator.
- This pin contains file csv or field of beam's
- end condition added by the user. if
- there's no file added, it would take
- value of all beam's end condition as
- 1.
+        This pin contains a CSV file or field of the beam's end condition added by the user. If no file is added, the end condition of all beams is taken as 1.
- Parameters
- ----------
- my_field_beam_end_condition : DataSources or Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -259,14 +251,15 @@ def field_beam_end_condition(self):
return self._field_beam_end_condition
@property
- def field_beam_moment_inertia(self):
- """Allows to connect field_beam_moment_inertia input to the operator.
+ def field_beam_moment_inertia(self) -> Input:
+ r"""Allows to connect field_beam_moment_inertia input to the operator.
        Field of beam's moment of inertia
- Parameters
- ----------
- my_field_beam_moment_inertia : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -279,14 +272,15 @@ def field_beam_moment_inertia(self):
return self._field_beam_moment_inertia
@property
- def field_beam_young_modulus(self):
- """Allows to connect field_beam_young_modulus input to the operator.
+ def field_beam_young_modulus(self) -> Input:
+ r"""Allows to connect field_beam_young_modulus input to the operator.
        Field of beam's Young's modulus
- Parameters
- ----------
- my_field_beam_young_modulus : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -299,14 +293,15 @@ def field_beam_young_modulus(self):
return self._field_beam_young_modulus
@property
- def field_beam_length(self):
- """Allows to connect field_beam_length input to the operator.
+ def field_beam_length(self) -> Input:
+ r"""Allows to connect field_beam_length input to the operator.
Field of beam's length
- Parameters
- ----------
- my_field_beam_length : Field
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -349,52 +344,61 @@ def __init__(self, op: Operator):
self._outputs.append(self._field_euler_critical_load_zz)
@property
- def field_euler_critical_load(self):
- """Allows to get field_euler_critical_load output of the operator
+ def field_euler_critical_load(self) -> Output:
+ r"""Allows to get field_euler_critical_load output of the operator
+
+        This field contains Euler's Critical Load about the principal axis of the cross section having the least moment of inertia.
Returns
- ----------
- my_field_euler_critical_load : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.euler_load_buckling()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_euler_critical_load = op.outputs.field_euler_critical_load()
- """ # noqa: E501
+ """
return self._field_euler_critical_load
@property
- def field_euler_critical_load_yy(self):
- """Allows to get field_euler_critical_load_yy output of the operator
+ def field_euler_critical_load_yy(self) -> Output:
+ r"""Allows to get field_euler_critical_load_yy output of the operator
+
+ This field contains Euler's Critical Load on axis y.
Returns
- ----------
- my_field_euler_critical_load_yy : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.euler_load_buckling()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_euler_critical_load_yy = op.outputs.field_euler_critical_load_yy()
- """ # noqa: E501
+ """
return self._field_euler_critical_load_yy
@property
- def field_euler_critical_load_zz(self):
- """Allows to get field_euler_critical_load_zz output of the operator
+ def field_euler_critical_load_zz(self) -> Output:
+ r"""Allows to get field_euler_critical_load_zz output of the operator
+
+ This field contains Euler's Critical Load on axis z.
Returns
- ----------
- my_field_euler_critical_load_zz : Field
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.euler_load_buckling()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_field_euler_critical_load_zz = op.outputs.field_euler_critical_load_zz()
- """ # noqa: E501
+ """
return self._field_euler_critical_load_zz
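A hedged sketch of euler_load_buckling with the four input fields documented above; the *_field variables are placeholders for fields the user has already built or read from a result file, and only the three documented outputs are retrieved.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.euler_load_buckling()
>>> op.inputs.field_beam_end_condition.connect(end_condition_field)  # CSV data sources or field of the end-condition factor (placeholder)
>>> op.inputs.field_beam_moment_inertia.connect(inertia_field)  # pin 6 (placeholder field)
>>> op.inputs.field_beam_young_modulus.connect(young_modulus_field)  # pin 7 (placeholder field)
>>> op.inputs.field_beam_length.connect(length_field)  # pin 8 (placeholder field)
>>> ncr = op.outputs.field_euler_critical_load()  # critical load about the weakest principal axis
>>> ncr_yy = op.outputs.field_euler_critical_load_yy()  # critical load on axis y
>>> ncr_zz = op.outputs.field_euler_critical_load_zz()  # critical load on axis z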
diff --git a/src/ansys/dpf/core/operators/result/euler_nodes.py b/src/ansys/dpf/core/operators/result/euler_nodes.py
index c08f34855a1..0cde5ec36e1 100644
--- a/src/ansys/dpf/core/operators/result/euler_nodes.py
+++ b/src/ansys/dpf/core/operators/result/euler_nodes.py
@@ -4,35 +4,35 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class euler_nodes(Operator):
- """Reads a field made of 3 coordinates and 3 Euler angles (6 dofs) by
- node from the result file.
+ r"""Reads a field made of 3 coordinates and 3 Euler angles (6 dofs) by node
+ from the result file.
+
Parameters
----------
- streams_container : StreamsContainer or Stream, optional
- data_sources : DataSources
- filter_zeros : bool
- If true, then the field will only contain the
- scoping if any rotation is not zero.
- (default is false).
- coord_and_euler : bool
- If true, then the field has ncomp=6 with 3
- coordinates and 3 euler angles, else
- there is only the euler angles
- (default is true).
- mesh : MeshedRegion, optional
+ streams_container: StreamsContainer or Stream, optional
+ data_sources: DataSources
+ filter_zeros: bool
+ if true, then the field will only contain the scoping if any rotation is not zero. (default is false).
+ coord_and_euler: bool
+        if true, then the field has ncomp=6 with 3 coordinates and 3 Euler angles, else only the Euler angles are present (default is true).
+ mesh: MeshedRegion, optional
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -91,9 +91,10 @@ def __init__(
self.inputs.mesh.connect(mesh)
@staticmethod
- def _spec():
- description = """Reads a field made of 3 coordinates and 3 Euler angles (6 dofs) by
- node from the result file."""
+ def _spec() -> Specification:
+ description = r"""Reads a field made of 3 coordinates and 3 Euler angles (6 dofs) by node
+from the result file.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -101,36 +102,31 @@ def _spec():
name="streams_container",
type_names=["streams_container", "stream"],
optional=True,
- document="""""",
+ document=r"""""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""""",
+ document=r"""""",
),
5: PinSpecification(
name="filter_zeros",
type_names=["bool"],
optional=False,
- document="""If true, then the field will only contain the
- scoping if any rotation is not zero.
- (default is false).""",
+ document=r"""if true, then the field will only contain the scoping if any rotation is not zero. (default is false).""",
),
6: PinSpecification(
name="coord_and_euler",
type_names=["bool"],
optional=False,
- document="""If true, then the field has ncomp=6 with 3
- coordinates and 3 euler angles, else
- there is only the euler angles
- (default is true).""",
+ document=r"""if true, then the field has ncomp=6 with 3 coordinates and 3 Euler angles, else there is only the Euler angles (default is true).""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region"],
optional=True,
- document="""""",
+ document=r"""""",
),
},
map_output_pin_spec={
@@ -138,14 +134,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -154,29 +150,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="coords_and_euler_nodes", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsEulerNodes:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsEulerNodes
+ inputs:
+ An instance of InputsEulerNodes.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsEulerNodes:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsEulerNodes
+ outputs:
+ An instance of OutputsEulerNodes.
"""
return super().outputs
@@ -215,12 +218,13 @@ def __init__(self, op: Operator):
self._inputs.append(self._mesh)
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Parameters
- ----------
- my_streams_container : StreamsContainer or Stream
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -233,12 +237,13 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -251,16 +256,15 @@ def data_sources(self):
return self._data_sources
@property
- def filter_zeros(self):
- """Allows to connect filter_zeros input to the operator.
+ def filter_zeros(self) -> Input:
+ r"""Allows to connect filter_zeros input to the operator.
- If true, then the field will only contain the
- scoping if any rotation is not zero.
- (default is false).
+ if true, then the field will only contain the scoping if any rotation is not zero. (default is false).
- Parameters
- ----------
- my_filter_zeros : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -273,17 +277,15 @@ def filter_zeros(self):
return self._filter_zeros
@property
- def coord_and_euler(self):
- """Allows to connect coord_and_euler input to the operator.
+ def coord_and_euler(self) -> Input:
+ r"""Allows to connect coord_and_euler input to the operator.
- If true, then the field has ncomp=6 with 3
- coordinates and 3 euler angles, else
- there is only the euler angles
- (default is true).
+        if true, then the field has ncomp=6 with 3 coordinates and 3 Euler angles, else only the Euler angles are present (default is true).
- Parameters
- ----------
- my_coord_and_euler : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -296,12 +298,13 @@ def coord_and_euler(self):
return self._coord_and_euler
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Parameters
- ----------
- my_mesh : MeshedRegion
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -332,18 +335,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.euler_nodes()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
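A minimal sketch of euler_nodes using the pins documented above; the result-file path is a placeholder, filter_zeros is switched on here to keep only nodes with a non-zero rotation, and coord_and_euler keeps the documented default of 6 components per node.

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/result_file.rst")  # placeholder result file
>>> op = dpf.operators.result.euler_nodes()
>>> op.inputs.data_sources.connect(data_sources)  # pin 4
>>> op.inputs.filter_zeros.connect(True)  # pin 5: keep only nodes whose rotation is not zero
>>> op.inputs.coord_and_euler.connect(True)  # pin 6: 3 coordinates + 3 Euler angles per node
>>> fields = op.outputs.fields_container()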
diff --git a/src/ansys/dpf/core/operators/result/gasket_deformation.py b/src/ansys/dpf/core/operators/result/gasket_deformation.py
index 70749af4c59..451e4a9fa45 100644
--- a/src/ansys/dpf/core/operators/result/gasket_deformation.py
+++ b/src/ansys/dpf/core/operators/result/gasket_deformation.py
@@ -4,91 +4,51 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_deformation(Operator):
- """Read/compute elemental gasket deformation by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket deformation by calling the readers defined
+ by the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+        Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +143,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket deformation by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket deformation by calling the readers defined
+by the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +162,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +230,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +246,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKD", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketDeformation:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketDeformation
+ inputs:
+ An instance of InputsGasketDeformation.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketDeformation:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketDeformation
+ outputs:
+ An instance of OutputsGasketDeformation.
"""
return super().outputs
@@ -423,28 +346,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -457,24 +367,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -487,15 +388,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+        Fields container already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -508,15 +409,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -529,15 +430,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -550,15 +451,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -571,15 +472,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+        prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -592,15 +493,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -613,15 +514,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -634,20 +535,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -660,21 +556,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+        If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -705,18 +595,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_deformation()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
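Finally, a hedged sketch of gasket_deformation with the pins documented above; the result-file path is a placeholder, a single time/freq set id is used for the time scoping, and the requested location string follows the Nodal/Elemental/ElementalNodal values named in the pin description.

>>> from ansys.dpf import core as dpf
>>> data_sources = dpf.DataSources(r"path/to/result_file.rst")  # placeholder result file (pin 4, used when no streams are set)
>>> op = dpf.operators.result.gasket_deformation()
>>> op.inputs.data_sources.connect(data_sources)
>>> op.inputs.time_scoping.connect(1)  # pin 0: a single time/freq set id
>>> op.inputs.requested_location.connect("Nodal")  # pin 9: Nodal, Elemental or ElementalNodal
>>> op.inputs.bool_rotate_to_global.connect(True)  # pin 5: rotate to the global coordinate system (the documented default)
>>> gasket_deformation_fc = op.outputs.fields_container()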
diff --git a/src/ansys/dpf/core/operators/result/gasket_deformation_X.py b/src/ansys/dpf/core/operators/result/gasket_deformation_X.py
index fdf56d0e7ed..304a075d90d 100644
--- a/src/ansys/dpf/core/operators/result/gasket_deformation_X.py
+++ b/src/ansys/dpf/core/operators/result/gasket_deformation_X.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_deformation_X(Operator):
- """Read/compute elemental gasket deformation XX normal component (00
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket deformation XX normal component (00
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time/freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+        FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket deformation XX normal component (00
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket deformation XX normal component (00
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+            document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketDeformationX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketDeformationX
+ inputs:
+ An instance of InputsGasketDeformationX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketDeformationX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketDeformationX
+ outputs:
+ An instance of OutputsGasketDeformationX.
"""
return super().outputs
@@ -388,28 +332,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -422,24 +353,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -452,15 +374,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -473,15 +395,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -494,15 +416,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -515,15 +437,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -536,15 +458,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+        prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -557,14 +479,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -577,18 +500,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -601,15 +521,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -642,18 +562,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_deformation_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
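A minimal usage sketch for the gasket_deformation_X operator documented above. The pin names and the connect/evaluate pattern come from the docstrings in this diff; the result file path is hypothetical, and dpf.DataSources is the standard ansys.dpf.core entry point for pointing the operator at result files.

from ansys.dpf import core as dpf

# Hypothetical result file containing gasket results; replace with a real path.
data_sources = dpf.DataSources("model.rst")

op = dpf.operators.result.gasket_deformation_X()
op.inputs.data_sources.connect(data_sources)       # pin 4, the only mandatory input
op.inputs.requested_location.connect("Elemental")  # pin 9, optional (default is Nodal)

# Evaluating the output pin runs the operator and returns a FieldsContainer.
fields_container = op.outputs.fields_container()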
diff --git a/src/ansys/dpf/core/operators/result/gasket_deformation_XY.py b/src/ansys/dpf/core/operators/result/gasket_deformation_XY.py
index a3a83069d32..a84cc53958d 100644
--- a/src/ansys/dpf/core/operators/result/gasket_deformation_XY.py
+++ b/src/ansys/dpf/core/operators/result/gasket_deformation_XY.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_deformation_XY(Operator):
- """Read/compute elemental gasket deformation XY shear component (01
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket deformation XY shear component (01
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket deformation XY shear component (01
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket deformation XY shear component (01
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+            document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+            document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDXY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketDeformationXy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketDeformationXy
+ inputs:
+ An instance of InputsGasketDeformationXy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketDeformationXy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketDeformationXy
+ outputs:
+ An instance of OutputsGasketDeformationXy.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+        prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_deformation_XY()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
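The XY shear component follows the same pattern; the sketch below additionally scopes the request to a few elements and enables cyclic expansion. The element IDs and file path are hypothetical, and dpf.Scoping / dpf.locations come from the ansys.dpf.core API rather than from this diff.

from ansys.dpf import core as dpf

# Hypothetical element IDs; the scoping location tells the reader whether
# nodes or elements are requested (pin 1 accepts a Scoping or ScopingsContainer).
elem_scoping = dpf.Scoping(ids=[10, 11, 12], location=dpf.locations.elemental)

op = dpf.operators.result.gasket_deformation_XY()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path
op.inputs.mesh_scoping.connect(elem_scoping)  # pin 1: restrict the output to these elements
op.inputs.read_cyclic.connect(2)              # pin 14: 2 performs cyclic expansion

fields_container = op.outputs.fields_container()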
diff --git a/src/ansys/dpf/core/operators/result/gasket_deformation_XZ.py b/src/ansys/dpf/core/operators/result/gasket_deformation_XZ.py
index a0a92cf09af..11e474c8533 100644
--- a/src/ansys/dpf/core/operators/result/gasket_deformation_XZ.py
+++ b/src/ansys/dpf/core/operators/result/gasket_deformation_XZ.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_deformation_XZ(Operator):
- """Read/compute elemental gasket deformation XZ shear component (02
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket deformation XZ shear component (02
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket deformation XZ shear component (02
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket deformation XZ shear component (02
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+            document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+            document=r"""prevents reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDXZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketDeformationXz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketDeformationXz
+ inputs:
+ An instance of InputsGasketDeformationXz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketDeformationXz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketDeformationXz
+ outputs:
+ An instance of OutputsGasketDeformationXz.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+        prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_deformation_XZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
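For the XZ component, the sketch below selects a single time/freq set through pin 0, which accepts ints, a Scoping, or a Field as described above; the set number and file path are placeholders.

from ansys.dpf import core as dpf

op = dpf.operators.result.gasket_deformation_XZ()
op.inputs.data_sources.connect(dpf.DataSources("model.rst"))  # hypothetical path

# pin 0: a single time/freq set ID (an int) selects one result set.
op.inputs.time_scoping.connect(2)

fields_container = op.outputs.fields_container()
print(fields_container)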
diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py
index 655e59c3659..e669b0361d4 100644
--- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py
+++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure.py
@@ -4,91 +4,52 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_inelastic_closure(Operator):
- """Read/compute elemental gasket inelastic closure by calling the readers
- defined by the datasources. Regarding the requested location and
- the input mesh scoping, the result location can be
+ r"""Read/compute elemental gasket inelastic closure by calling the readers
+ defined by the datasources. Regarding the requested location and the
+ input mesh scoping, the result location can be
Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+        time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+        prevents reading the mesh in the result files
+ requested_location: str, optional
+        requested location: Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+        If this pin is set to true, it forces elemental nodal shell and solid results to be split. If set to false (default), a specific shell layer is still needed to merge the fields. Merging is possible only if a shell layer is provided.
+ shell_layer: int, optional
+        If the requested_location pin is not connected, and if the split_shells pin is set to true, we choose one of the shell layers for shell elements. If the split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +144,12 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket inelastic closure by calling the readers
- defined by the datasources. Regarding the requested
- location and the input mesh scoping, the result location
- can be Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket inelastic closure by calling the readers
+defined by the datasources. Regarding the requested location and the
+input mesh scoping, the result location can be
+Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +164,67 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fields container already allocated modified
- inplace""",
+ document=r"""Fields container already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location nodal, elemental or
- elementalnodal""",
+ document=r"""requested location Nodal, Elemental or ElementalNodal""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
26: PinSpecification(
name="split_shells",
type_names=["bool"],
optional=True,
- document="""This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.""",
+ document=r"""This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.""",
),
27: PinSpecification(
name="shell_layer",
type_names=["int32"],
optional=True,
- document="""If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.""",
+ document=r"""If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layer for shell element. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.""",
),
},
map_output_pin_spec={
@@ -314,14 +232,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -330,29 +248,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDI", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketInelasticClosure:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketInelasticClosure
+ inputs:
+ An instance of InputsGasketInelasticClosure.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketInelasticClosure:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketInelasticClosure
+ outputs:
+ An instance of OutputsGasketInelasticClosure.
"""
return super().outputs
@@ -435,28 +360,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._shell_layer)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -469,24 +381,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -499,15 +402,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fields container already allocated modified
- inplace
+ Fields container already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -520,15 +423,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -541,15 +444,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -562,15 +465,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -583,15 +486,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -604,15 +507,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location nodal, elemental or
- elementalnodal
+ requested location Nodal, Elemental or ElementalNodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -625,15 +528,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -646,20 +549,15 @@ def read_beams(self):
return self._read_beams
@property
- def split_shells(self):
- """Allows to connect split_shells input to the operator.
+ def split_shells(self) -> Input:
+ r"""Allows to connect split_shells input to the operator.
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
+ This pin forces elemental nodal shell and solid results to be split if this pin is set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
- Parameters
- ----------
- my_split_shells : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -672,21 +570,15 @@ def split_shells(self):
return self._split_shells
@property
- def shell_layer(self):
- """Allows to connect shell_layer input to the operator.
-
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ def shell_layer(self) -> Input:
+ r"""Allows to connect shell_layer input to the operator.
- Parameters
- ----------
- my_shell_layer : int
+ If the requested_location pin is not connected, and if split_shells pin is set to true, we choose one of the shell layers for shell elements. If split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -719,18 +611,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
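A minimal usage sketch for the operator regenerated above, assuming a placeholder result file path ("file.rst"): pins are connected through op.inputs and the result is evaluated through op.outputs, in the same doctest style as the docstrings.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure()
>>> # "file.rst" is a placeholder path to a result file
>>> op.inputs.data_sources.connect(dpf.DataSources("file.rst"))
>>> # average the elemental nodal gasket result to the nodes
>>> op.inputs.requested_location.connect("Nodal")
>>> result_fields_container = op.outputs.fields_container()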
diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py
index d2fbae9de43..4b8a44e3563 100644
--- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py
+++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_X.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_inelastic_closure_X(Operator):
- """Read/compute elemental gasket inelastic closure XX normal component
- (00 component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket inelastic closure XX normal component (00
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket inelastic closure XX normal component
- (00 component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket inelastic closure XX normal component (00
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDIX", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketInelasticClosureX:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketInelasticClosureX
+ inputs:
+ An instance of InputsGasketInelasticClosureX.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketInelasticClosureX:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketInelasticClosureX
+ outputs:
+ An instance of OutputsGasketInelasticClosureX.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure_X()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
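A sketch of driving the time_scoping and bool_rotate_to_global pins documented above, assuming the same placeholder result file path; per the pin documentation, a list of ints is interpreted as time/freq set ids.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure_X()
>>> op.inputs.data_sources.connect(dpf.DataSources("file.rst"))  # placeholder path
>>> op.inputs.time_scoping.connect([1, 2])  # time/freq set ids as ints
>>> op.inputs.bool_rotate_to_global.connect(True)  # rotate results to the global coordinate system
>>> result_fields_container = op.outputs.fields_container()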
diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py
index 6d01c6b5c8f..ebfab1a3a39 100644
--- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py
+++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XY.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_inelastic_closure_XY(Operator):
- """Read/compute elemental gasket inelastic closure XY shear component (01
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket inelastic closure XY shear component (01
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated modified inplace
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket inelastic closure XY shear component (01
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket inelastic closure XY shear component (01
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDIXY", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketInelasticClosureXy:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketInelasticClosureXy
+ inputs:
+ An instance of InputsGasketInelasticClosureXy.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketInelasticClosureXy:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketInelasticClosureXy
+ outputs:
+ An instance of OutputsGasketInelasticClosureXy.
"""
return super().outputs
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated modified inplace
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure_XY()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
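A sketch of restricting the XY component result to a subset of elements through the mesh_scoping pin; the element ids and the result file path are placeholders, and dpf.Scoping and dpf.locations are assumed to be available from ansys.dpf.core.

>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure_XY()
>>> op.inputs.data_sources.connect(dpf.DataSources("file.rst"))  # placeholder path
>>> scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.elemental)  # placeholder element ids
>>> op.inputs.mesh_scoping.connect(scoping)
>>> result_fields_container = op.outputs.fields_container()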
diff --git a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py
index 2692c123ea2..3ed7a7a209d 100644
--- a/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py
+++ b/src/ansys/dpf/core/operators/result/gasket_inelastic_closure_XZ.py
@@ -4,79 +4,50 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_inelastic_closure_XZ(Operator):
- """Read/compute elemental gasket inelastic closure XZ shear component (02
- component) by calling the readers defined by the datasources.
- Regarding the requested location and the input mesh scoping, the
- result location can be Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket inelastic closure XZ shear component (02
+ component) by calling the readers defined by the datasources. Regarding
+ the requested location and the input mesh scoping, the result location
+ can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fieldscontainer already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location, default is nodal
- read_cyclic : int, optional
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using a scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ FieldsContainer already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location, default is Nodal
+ read_cyclic: int, optional
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -165,12 +136,12 @@ def __init__(
self.inputs.read_beams.connect(read_beams)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket inelastic closure XZ shear component (02
- component) by calling the readers defined by the
- datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket inelastic closure XZ shear component (02
+component) by calling the readers defined by the datasources. Regarding
+the requested location and the input mesh scoping, the result location
+can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -185,95 +156,61 @@ def _spec():
"vector",
],
optional=True,
- document="""Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.""",
+ document=r"""time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq intrapolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.""",
),
1: PinSpecification(
name="mesh_scoping",
type_names=["scopings_container", "scoping"],
optional=True,
- document="""Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains""",
+ document=r"""nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using scopings container allows you to split the result fields container into domains""",
),
2: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=True,
- document="""Fieldscontainer already allocated modified
- inplace""",
+ document=r"""FieldsContainer already allocated modified inplace""",
),
3: PinSpecification(
name="streams_container",
type_names=["streams_container"],
optional=True,
- document="""Result file container allowed to be kept open
- to cache data""",
+ document=r"""result file container allowed to be kept open to cache data""",
),
4: PinSpecification(
name="data_sources",
type_names=["data_sources"],
optional=False,
- document="""Result file path container, used if no
- streams are set""",
+ document=r"""result file path container, used if no streams are set""",
),
5: PinSpecification(
name="bool_rotate_to_global",
type_names=["bool"],
optional=True,
- document="""If true the field is rotated to global
- coordinate system (default true)""",
+ document=r"""if true the field is rotated to global coordinate system (default true)""",
),
7: PinSpecification(
name="mesh",
type_names=["abstract_meshed_region", "meshes_container"],
optional=True,
- document="""Prevents from reading the mesh in the result
- files""",
+ document=r"""prevents from reading the mesh in the result files""",
),
9: PinSpecification(
name="requested_location",
type_names=["string"],
optional=True,
- document="""Requested location, default is nodal""",
+ document=r"""requested location, default is Nodal""",
),
14: PinSpecification(
name="read_cyclic",
type_names=["enum dataProcessing::ECyclicReading", "int32"],
optional=True,
- document="""If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)""",
+ document=r"""if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)""",
),
22: PinSpecification(
name="read_beams",
type_names=["bool"],
optional=True,
- document="""Elemental nodal beam results are read if this
- pin is set to true (default is false)""",
+ document=r"""elemental nodal beam results are read if this pin is set to true (default is false)""",
),
},
map_output_pin_spec={
@@ -281,14 +218,14 @@ def _spec():
name="fields_container",
type_names=["fields_container"],
optional=False,
- document="""""",
+ document=r"""""",
),
},
)
return spec
@staticmethod
- def default_config(server=None):
+ def default_config(server: AnyServerType = None) -> Config:
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
@@ -297,29 +234,36 @@ def default_config(server=None):
Parameters
----------
- server : server.DPFServer, optional
+ server:
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
+
+ Returns
+ -------
+ config:
+ A new Config instance equivalent to the default config for this operator.
"""
return Operator.default_config(name="GKDIXZ", server=server)
@property
- def inputs(self):
+ def inputs(self) -> InputsGasketInelasticClosureXz:
"""Enables to connect inputs to the operator
Returns
--------
- inputs : InputsGasketInelasticClosureXz
+ inputs:
+ An instance of InputsGasketInelasticClosureXz.
"""
return super().inputs
@property
- def outputs(self):
+ def outputs(self) -> OutputsGasketInelasticClosureXz:
"""Enables to get outputs of the operator by evaluating it
Returns
--------
- outputs : OutputsGasketInelasticClosureXz
+ outputs:
+ An instance of OutputsGasketInelasticClosureXz.
"""
return super().outputs
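The default_config staticmethod and the typed inputs/outputs accessors in the hunk above are typically combined as sketched below; the config keyword on the generated constructor is an assumption based on the usual pattern of these classes and is not shown in this patch:

from ansys.dpf import core as dpf

# Fetch the default configuration of the "GKDIXZ" operator so it can be
# adjusted before the operator is instantiated (constructor keyword assumed).
config = dpf.operators.result.gasket_inelastic_closure_XZ.default_config()
op = dpf.operators.result.gasket_inelastic_closure_XZ(config=config)

# The properties return the typed wrappers named in the new annotations.
inputs = op.inputs    # InputsGasketInelasticClosureXz
outputs = op.outputs  # OutputsGasketInelasticClosureXz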
@@ -396,28 +340,15 @@ def __init__(self, op: Operator):
self._inputs.append(self._read_beams)
@property
- def time_scoping(self):
- """Allows to connect time_scoping input to the operator.
-
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
+ def time_scoping(self) -> Input:
+ r"""Allows to connect time_scoping input to the operator.
- Parameters
- ----------
- my_time_scoping : Scoping or int or float or Field
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -430,24 +361,15 @@ def time_scoping(self):
return self._time_scoping
@property
- def mesh_scoping(self):
- """Allows to connect mesh_scoping input to the operator.
-
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
+ def mesh_scoping(self) -> Input:
+ r"""Allows to connect mesh_scoping input to the operator.
- Parameters
- ----------
- my_mesh_scoping : ScopingsContainer or Scoping
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using a scopings container allows you to split the result fields container into domains
+
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -460,15 +382,15 @@ def mesh_scoping(self):
return self._mesh_scoping
@property
- def fields_container(self):
- """Allows to connect fields_container input to the operator.
+ def fields_container(self) -> Input:
+ r"""Allows to connect fields_container input to the operator.
- Fieldscontainer already allocated modified
- inplace
+ FieldsContainer already allocated, modified in place
- Parameters
- ----------
- my_fields_container : FieldsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -481,15 +403,15 @@ def fields_container(self):
return self._fields_container
@property
- def streams_container(self):
- """Allows to connect streams_container input to the operator.
+ def streams_container(self) -> Input:
+ r"""Allows to connect streams_container input to the operator.
- Result file container allowed to be kept open
- to cache data
+ result file container allowed to be kept open to cache data
- Parameters
- ----------
- my_streams_container : StreamsContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -502,15 +424,15 @@ def streams_container(self):
return self._streams_container
@property
- def data_sources(self):
- """Allows to connect data_sources input to the operator.
+ def data_sources(self) -> Input:
+ r"""Allows to connect data_sources input to the operator.
- Result file path container, used if no
- streams are set
+ result file path container, used if no streams are set
- Parameters
- ----------
- my_data_sources : DataSources
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -523,15 +445,15 @@ def data_sources(self):
return self._data_sources
@property
- def bool_rotate_to_global(self):
- """Allows to connect bool_rotate_to_global input to the operator.
+ def bool_rotate_to_global(self) -> Input:
+ r"""Allows to connect bool_rotate_to_global input to the operator.
- If true the field is rotated to global
- coordinate system (default true)
+ if true the field is rotated to global coordinate system (default true)
- Parameters
- ----------
- my_bool_rotate_to_global : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -544,15 +466,15 @@ def bool_rotate_to_global(self):
return self._bool_rotate_to_global
@property
- def mesh(self):
- """Allows to connect mesh input to the operator.
+ def mesh(self) -> Input:
+ r"""Allows to connect mesh input to the operator.
- Prevents from reading the mesh in the result
- files
+ prevents reading the mesh in the result files
- Parameters
- ----------
- my_mesh : MeshedRegion or MeshesContainer
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -565,14 +487,15 @@ def mesh(self):
return self._mesh
@property
- def requested_location(self):
- """Allows to connect requested_location input to the operator.
+ def requested_location(self) -> Input:
+ r"""Allows to connect requested_location input to the operator.
- Requested location, default is nodal
+ requested location, default is Nodal
- Parameters
- ----------
- my_requested_location : str
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -585,18 +508,15 @@ def requested_location(self):
return self._requested_location
@property
- def read_cyclic(self):
- """Allows to connect read_cyclic input to the operator.
+ def read_cyclic(self) -> Input:
+ r"""Allows to connect read_cyclic input to the operator.
- If 0 cyclic symmetry is ignored, if 1 cyclic
- sector is read, if 2 cyclic expansion
- is done, if 3 cyclic expansion is
- done and stages are merged (default
- is 1)
+ if 0 cyclic symmetry is ignored, if 1 cyclic sector is read, if 2 cyclic expansion is done, if 3 cyclic expansion is done and stages are merged (default is 1)
- Parameters
- ----------
- my_read_cyclic : int
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -609,15 +529,15 @@ def read_cyclic(self):
return self._read_cyclic
@property
- def read_beams(self):
- """Allows to connect read_beams input to the operator.
+ def read_beams(self) -> Input:
+ r"""Allows to connect read_beams input to the operator.
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
+ elemental nodal beam results are read if this pin is set to true (default is false)
- Parameters
- ----------
- my_read_beams : bool
+ Returns
+ -------
+ input:
+ An Input instance for this pin.
Examples
--------
@@ -650,18 +570,19 @@ def __init__(self, op: Operator):
self._outputs.append(self._fields_container)
@property
- def fields_container(self):
- """Allows to get fields_container output of the operator
+ def fields_container(self) -> Output:
+ r"""Allows to get fields_container output of the operator
Returns
- ----------
- my_fields_container : FieldsContainer
+ -------
+ output:
+ An Output instance for this pin.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.result.gasket_inelastic_closure_XZ()
- >>> # Connect inputs : op.inputs. ...
+ >>> # Get the output from op.outputs. ...
>>> result_fields_container = op.outputs.fields_container()
- """ # noqa: E501
+ """
return self._fields_container
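As with the XY variant, the XZ operator is driven entirely through its pins; the sketch below (placeholder file name, hypothetical node IDs) exercises the optional mesh_scoping and read_cyclic pins documented above:

from ansys.dpf import core as dpf

data_sources = dpf.DataSources("model.rst")  # placeholder result file

# Hypothetical scoping: restrict the output to three node IDs on the nodal location.
mesh_scoping = dpf.Scoping(ids=[1, 2, 3], location=dpf.locations.nodal)

op = dpf.operators.result.gasket_inelastic_closure_XZ()
op.inputs.data_sources.connect(data_sources)
op.inputs.mesh_scoping.connect(mesh_scoping)
op.inputs.read_cyclic.connect(2)  # 2 requests cyclic expansion (see pin 14 above)

fields_container = op.outputs.fields_container()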
diff --git a/src/ansys/dpf/core/operators/result/gasket_stress.py b/src/ansys/dpf/core/operators/result/gasket_stress.py
index 9ed8435d47c..c779352b28e 100644
--- a/src/ansys/dpf/core/operators/result/gasket_stress.py
+++ b/src/ansys/dpf/core/operators/result/gasket_stress.py
@@ -4,91 +4,51 @@
Autogenerated DPF operator classes.
"""
+from __future__ import annotations
+
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
+from ansys.dpf.core.config import Config
+from ansys.dpf.core.server_types import AnyServerType
class gasket_stress(Operator):
- """Read/compute elemental gasket stress by calling the readers defined by
- the datasources. Regarding the requested location and the input
- mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental.
+ r"""Read/compute elemental gasket stress by calling the readers defined by
+ the datasources. Regarding the requested location and the input mesh
+ scoping, the result location can be Nodal/ElementalNodal/Elemental.
+
Parameters
----------
- time_scoping : Scoping or int or float or Field, optional
- Time/freq values (use doubles or field),
- time/freq set ids (use ints or
- scoping) or time/freq step ids (use
- scoping with timefreq_steps location)
- required in output. to specify
- time/freq values at specific load
- steps, put a field (and not a list)
- in input with a scoping located on
- "timefreq_steps". linear time freq
- intrapolation is performed if the
- values are not in the result files
- and the data at the max time or freq
- is taken when time/freqs are higher
- than available time/freqs in result
- files.
- mesh_scoping : ScopingsContainer or Scoping, optional
- Nodes or elements scoping required in output.
- the output fields will be scoped on
- these node or element ids. to figure
- out the ordering of the fields data,
- look at their scoping ids as they
- might not be ordered as the input
- scoping was. the scoping's location
- indicates whether nodes or elements
- are asked for. using scopings
- container allows you to split the
- result fields container into domains
- fields_container : FieldsContainer, optional
- Fields container already allocated modified
- inplace
- streams_container : StreamsContainer, optional
- Result file container allowed to be kept open
- to cache data
- data_sources : DataSources
- Result file path container, used if no
- streams are set
- bool_rotate_to_global : bool, optional
- If true the field is rotated to global
- coordinate system (default true)
- mesh : MeshedRegion or MeshesContainer, optional
- Prevents from reading the mesh in the result
- files
- requested_location : str, optional
- Requested location nodal, elemental or
- elementalnodal
- read_beams : bool, optional
- Elemental nodal beam results are read if this
- pin is set to true (default is false)
- split_shells : bool, optional
- This pin forces elemental nodal shell and
- solid results to be split if this pin
- is set to true. if set to false
- (default), a specific shell layer is
- still needed to merge the fields.
- merge is possible only if a shell
- layer is provided.
- shell_layer : int, optional
- If the requested_location pin is not
- connected, and if split_shells pin is
- set to true, we choose one of the
- shell layer for shell element. if
- split_shells pin is set to false
- (default value) and a specific shell
- layer is provided, results will be
- merged on this specific shell layer.
+ time_scoping: Scoping or int or float or Field, optional
+ time/freq values (use doubles or field), time/freq set ids (use ints or scoping) or time/freq step ids (use scoping with TimeFreq_steps location) required in output. To specify time/freq values at specific load steps, put a Field (and not a list) in input with a scoping located on "TimeFreq_steps". Linear time freq interpolation is performed if the values are not in the result files and the data at the max time or freq is taken when time/freqs are higher than available time/freqs in result files.
+ mesh_scoping: ScopingsContainer or Scoping, optional
+ nodes or elements scoping required in output. The output fields will be scoped on these node or element IDs. To figure out the ordering of the fields data, look at their scoping IDs as they might not be ordered as the input scoping was. The scoping's location indicates whether nodes or elements are asked for. Using a scopings container allows you to split the result fields container into domains
+ fields_container: FieldsContainer, optional
+ Fields container already allocated, modified in place
+ streams_container: StreamsContainer, optional
+ result file container allowed to be kept open to cache data
+ data_sources: DataSources
+ result file path container, used if no streams are set
+ bool_rotate_to_global: bool, optional
+ if true the field is rotated to global coordinate system (default true)
+ mesh: MeshedRegion or MeshesContainer, optional
+ prevents reading the mesh in the result files
+ requested_location: str, optional
+ requested location Nodal, Elemental or ElementalNodal
+ read_beams: bool, optional
+ elemental nodal beam results are read if this pin is set to true (default is false)
+ split_shells: bool, optional
+ This pin forces elemental nodal shell and solid results to be split if set to true. If set to false (default), a specific shell layer is still needed to merge the fields. Merge is possible only if a shell layer is provided.
+ shell_layer: int, optional
+ If the requested_location pin is not connected, and if the split_shells pin is set to true, we choose one of the shell layers for shell elements. If the split_shells pin is set to false (default value) and a specific shell layer is provided, results will be merged on this specific shell layer.
Returns
-------
- fields_container : FieldsContainer
+ fields_container: FieldsContainer
Examples
--------
@@ -183,11 +143,11 @@ def __init__(
self.inputs.shell_layer.connect(shell_layer)
@staticmethod
- def _spec():
- description = """Read/compute elemental gasket stress by calling the readers defined by
- the datasources. Regarding the requested location and the
- input mesh scoping, the result location can be
- Nodal/ElementalNodal/Elemental."""
+ def _spec() -> Specification:
+ description = r"""Read/compute elemental gasket stress by calling the readers defined by
+the datasources. Regarding the requested location and the input mesh
+scoping, the result location can be Nodal/ElementalNodal/Elemental.
+"""
spec = Specification(
description=description,
map_input_pin_spec={
@@ -202,111 +162,67 @@ def _spec():
"vector