Conversation

@pyansys-ci-bot (Collaborator) commented:

An update of generated code has been triggered either manually or by an update in the dpf-standalone repository.

pyansys-ci-bot added the `server-sync` label ("DO NOT USE, related to automatic synchronization with the server") on Nov 17, 2025.
github-actions bot (Contributor) commented on Nov 17, 2025:

Some tests with 'continue-on-error: true' have failed:

codecov bot commented on Nov 17, 2025:

❌ 48 Tests Failed:

| Tests completed | Failed | Passed | Skipped |
|-----------------|--------|--------|---------|
| 1412            | 48     | 1364   | 486     |
Failed tests, by shortest run time:
tests\test_operator.py::test_operator::test_empty_specification[in Process CLayer]
Stack Traces | 0.004s run time
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    def test_empty_specification(server_type):
>       op = dpf.core.dpf_operator.Operator("chunk_fc", server=server_type)

tests\test_operator.py:1361: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0809C2D40>
name = 'chunk_fc', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    def __init__(
        self,
        name: str = None,
        config: Config = None,
        server: AnyServerType = None,
        operator: Operator | int = None,
        inputs_type: type[_Inputs] = Inputs,
        outputs_type: type[_Outputs] = Outputs,
    ):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._inputs_class = inputs_type
        self._outputs_class = outputs_type
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
            raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
    
        self._spec = Specification(operator_name=self.name, server=self._server)
        # add dynamic inputs if no specific Inputs subclass is used
        if len(self._spec.inputs) > 0 and self._inputs_class == Inputs:
            self._inputs = self._inputs_class(self._spec.inputs, self)
        else:
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
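
All 48 failures share this signature: when an operator specification declares no inputs and the default `Inputs` class is in use, the generated `__init__` takes the `else` branch and calls `self._inputs_class(self)` with a single argument, while the base `Inputs.__init__` evidently requires two. A minimal, self-contained sketch of the arity mismatch follows; the two-argument `Inputs` signature below is an assumption made for illustration, not the actual `ansys.dpf.core.inputs.Inputs` definition:

```python
# Minimal sketch of the failure mode seen in the tracebacks above.
# The simplified Inputs signature is an assumption for illustration;
# the real ansys.dpf.core.inputs.Inputs class may differ.


class Inputs:
    def __init__(self, dict_inputs, operator):  # two required positional arguments
        self.dict_inputs = dict_inputs
        self.operator = operator


class Operator:
    def __init__(self, spec_inputs, inputs_class=Inputs):
        # Mirrors the branch in the generated dpf_operator.py: dynamic inputs
        # only when the spec has inputs AND the base Inputs class is used.
        if len(spec_inputs) > 0 and inputs_class == Inputs:
            self._inputs = inputs_class(spec_inputs, self)  # OK: two arguments
        else:
            self._inputs = inputs_class(self)  # TypeError: only one argument


try:
    Operator({})  # empty specification reproduces the failure
except TypeError as err:
    print(err)  # __init__() missing 1 required positional argument: 'operator'
```

If that reading is right, a plausible (unverified) fix is to branch on the class alone, e.g. `if self._inputs_class == Inputs:`, so that an empty specification still constructs the base `Inputs` with both arguments while generated subclasses keep receiving only the operator.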
tests\test_operator.py::test_operator::test_operator_specification_none[in Process CLayer]
Stack Traces | 0.004s run time
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    def test_operator_specification_none(server_type):
>       op = dpf.core.Operator("mapdl::rst::thickness", server=server_type)

tests\test_operator.py:1301: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08087B430>
name = 'mapdl::rst::thickness', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_meshregion.py::test_meshregion::test_get_scoping_meshedregion_from_operator[in Process CLayer]
Stack Traces | 0.006s run time
simple_bar = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\ASimpleBar.rst'
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    def test_get_scoping_meshedregion_from_operator(simple_bar, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(simple_bar)
>       mesh = dpf.core.Operator("mapdl::rst::MeshProvider", server=server_type)

tests\test_meshregion.py:40: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D080198820>
name = 'mapdl::rst::MeshProvider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_operator.py::test_operator::test_empty_specification[ansys-grpc-dpf]
Stack Traces | 0.006s run time
server_type = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>

    def test_empty_specification(server_type):
>       op = dpf.core.dpf_operator.Operator("chunk_fc", server=server_type)

tests\test_operator.py:1361: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0808D5CC0>
name = 'chunk_fc', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_operator.py::test_operator::test_empty_specification[gRPC CLayer]
Stack Traces | 0.006s run time
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    def test_empty_specification(server_type):
>       op = dpf.core.dpf_operator.Operator("chunk_fc", server=server_type)

tests\test_operator.py:1361: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D088713670>
name = 'chunk_fc', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_operator.py::test_operator::test_operator_specification_none[ansys-grpc-dpf]
Stack Traces | 0.006s run time
server_type = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>

    def test_operator_specification_none(server_type):
>       op = dpf.core.Operator("mapdl::rst::thickness", server=server_type)

tests\test_operator.py:1301: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0852F52D0>
name = 'mapdl::rst::thickness', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_operator.py::test_operator::test_operator_specification_none[gRPC CLayer]
Stack Traces | 0.006s run time
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    def test_operator_specification_none(server_type):
>       op = dpf.core.Operator("mapdl::rst::thickness", server=server_type)

tests\test_operator.py:1301: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D087FB6320>
name = 'mapdl::rst::thickness', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_resultinfo.py::test_resultinfo::test_get_resultinfo_no_model[in Process CLayer]
Stack Traces | 0.006s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    def test_get_resultinfo_no_model(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(velocity_acceleration, server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::ResultInfoProvider", server=server_type)

tests\test_resultinfo.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0804925F0>
name = 'mapdl::rst::ResultInfoProvider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_get_timefreqsupport[in Process CLayer]
Stack Traces | 0.006s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    def test_get_timefreqsupport(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider", server=server_type)

tests\test_timefreqsupport.py:42: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0ED53FD00>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_meshregion.py::test_meshregion::test_get_scoping_meshedregion_from_operator[gRPC CLayer]
Stack Traces | 0.009s run time
simple_bar = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\ASimpleBar.rst'
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    def test_get_scoping_meshedregion_from_operator(simple_bar, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(simple_bar)
>       mesh = dpf.core.Operator("mapdl::rst::MeshProvider", server=server_type)

tests\test_meshregion.py:40: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08045DC30>
name = 'mapdl::rst::MeshProvider', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_delete_auto_timefreqsupport
Stack Traces | 0.009s run time
simple_rst = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\simpleModel.rst'

    def test_delete_auto_timefreqsupport(simple_rst):
        dataSource = dpf.core.DataSources()
        dataSource.set_result_file_path(simple_rst)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider")

tests\test_timefreqsupport.py:101: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D087F92FB0>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None, server = None
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_delete_timefreqsupport
Stack Traces | 0.009s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'

    def test_delete_timefreqsupport(velocity_acceleration):
        dataSource = dpf.core.DataSources()
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider")

tests\test_timefreqsupport.py:87: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08488E320>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None, server = None
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_print_timefreqsupport
Stack Traces | 0.009s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'

    def test_print_timefreqsupport(velocity_acceleration):
        dataSource = dpf.core.DataSources()
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider")

tests\test_timefreqsupport.py:75: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08488E050>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None, server = None
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_get_frequencies_timefreqsupport
Stack Traces | 0.01s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'

    def test_get_frequencies_timefreqsupport(velocity_acceleration):
        dataSource = dpf.core.DataSources()
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider")

tests\test_timefreqsupport.py:64: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D087F4E950>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None, server = None
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_timefreqsupport.py::test_timefreqsupport::test_get_timefreqsupport[ansys-grpc-dpf]
Stack Traces | 0.01s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>

    def test_get_timefreqsupport(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider", server=server_type)

tests\test_timefreqsupport.py:42: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D082741DE0>
name = 'mapdl::rst::TimeFreqSupportProvider', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ body identical to the first traceback above]
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
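
Every failure shown in this report hits the same line: when the operator's specification lists no inputs (`len(self._spec.inputs) == 0`) and the default `Inputs` class is in use, the `else` branch calls `self._inputs_class(self)` with a single argument, while the dynamic `Inputs.__init__` requires both the spec's inputs and the operator. A minimal sketch of a dispatch that would route both cases correctly — assuming the dynamic signature is `Inputs(dict_inputs, operator)` and that generated `Inputs` subclasses take the operator alone, both read off the traceback rather than verified against the `dpf_operator.py` source:

```python
# Sketch only, not the shipped code: choose the constructor call based on
# which Inputs class is in use, rather than on whether the spec lists inputs.
# Assumption: the dynamic class is called as Inputs(dict_inputs, operator),
# while generated operator subclasses are called as SubclassInputs(operator).
if self._inputs_class is Inputs:
    # Dynamic case: pass the spec inputs even when the dict is empty,
    # so operators with zero declared pins no longer fall into the
    # one-argument call that raises the TypeError seen below.
    self._inputs = self._inputs_class(self._spec.inputs, self)
else:
    # Generated subclass: it derives its own pin map from the operator.
    self._inputs = self._inputs_class(self)
```

The remaining failures below all raise the identical `TypeError`, so only each test's own setup and the failing line are kept; the repeated `Operator.__init__` frame is elided.
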
tests\test_timefreqsupport.py::test_timefreqsupport::test_get_timefreqsupport[gRPC CLayer]
Stack Traces | 0.01s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    def test_get_timefreqsupport(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::TimeFreqSupportProvider", server=server_type)

tests\test_timefreqsupport.py:42: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_meshregion.py::test_meshregion::test_get_scoping_meshedregion_from_operator[ansys-grpc-dpf]
Stack Traces | 0.011s run time
simple_bar = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\ASimpleBar.rst'
server_type = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>

    def test_get_scoping_meshedregion_from_operator(simple_bar, server_type):
        dataSource = dpf.core.DataSources(server=server_type)
        dataSource.set_result_file_path(simple_bar)
>       mesh = dpf.core.Operator("mapdl::rst::MeshProvider", server=server_type)

tests\test_meshregion.py:40: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_resultinfo.py::test_resultinfo::test_get_resultinfo_no_model[ansys-grpc-dpf]
Stack Traces | 0.011s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0E9F4D1E0>

    def test_get_resultinfo_no_model(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(velocity_acceleration, server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::ResultInfoProvider", server=server_type)

tests\test_resultinfo.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_resultinfo.py::test_resultinfo::test_get_resultinfo_no_model[gRPC CLayer]
Stack Traces | 0.011s run time
velocity_acceleration = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles\\rst_operators\\velocity_acceleration.rst'
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    def test_get_resultinfo_no_model(velocity_acceleration, server_type):
        dataSource = dpf.core.DataSources(velocity_acceleration, server=server_type)
        dataSource.set_result_file_path(velocity_acceleration)
>       op = dpf.core.Operator("mapdl::rst::ResultInfoProvider", server=server_type)

tests\test_resultinfo.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_lists[gRPC CLayer]
Stack Traces | 0.016s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_lists(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
>       op = dpf.Operator("custom_forward_vec_int", server=server_type_remote_process)

tests\test_python_plugins.py:102: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_lists[ansys-grpc-dpf]
Stack Traces | 0.017s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_lists(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
>       op = dpf.Operator("custom_forward_vec_int", server=server_type_remote_process)

tests\test_python_plugins.py:102: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_field[gRPC CLayer]
Stack Traces | 0.022s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_field(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.fields_factory.create_3d_vector_field(3, "Elemental", server=server_type_remote_process)
        f.data = np.ones((3, 3), dtype=np.float64)
>       op = dpf.Operator("custom_forward_field", server=server_type_remote_process)

tests\test_python_plugins.py:117: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_property_field[gRPC CLayer]
Stack Traces | 0.022s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_property_field(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.PropertyField(server=server_type_remote_process)
        f.data = np.ones((9), dtype=np.int32)
>       op = dpf.Operator("custom_forward_property_field", server=server_type_remote_process)

tests\test_python_plugins.py:127: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_scoping[gRPC CLayer]
Stack Traces | 0.023s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_scoping(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Scoping(location="Elemental", server=server_type_remote_process)
>       op = dpf.Operator("custom_forward_scoping", server=server_type_remote_process)

tests\test_python_plugins.py:161: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_syntax_error[gRPC CLayer]
Stack Traces | 0.023s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    @conftest.raises_for_servers_version_under("4.0")
    def test_syntax_error(server_type_remote_process, testfiles_dir):
        dpf.load_library(
            dpf.path_utilities.to_server_os(
                Path(testfiles_dir) / "pythonPlugins" / "syntax_error_plugin",
                server_type_remote_process,
            ),
            "py_raising",
            "load_operators",
            server=server_type_remote_process,
        )
>       op = dpf.Operator("raising", server=server_type_remote_process)

tests\test_python_plugins.py:257: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_workflow[gRPC CLayer]
Stack Traces | 0.023s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    @pytest.mark.skipif(
        platform.system() == "Windows" and platform.python_version().startswith("3.8"),
        reason="Random SEGFAULT in the GitHub pipeline for 3.8 on Windows",
    )
    def test_workflow(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Workflow(server=server_type_remote_process)
        f.progress_bar = False
>       op = dpf.Operator("custom_forward_workflow", server=server_type_remote_process)

tests\test_python_plugins.py:218: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_scoping[ansys-grpc-dpf]
Stack Traces | 0.024s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_scoping(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Scoping(location="Elemental", server=server_type_remote_process)
>       op = dpf.Operator("custom_forward_scoping", server=server_type_remote_process)

tests\test_python_plugins.py:161: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_workflow[ansys-grpc-dpf]
Stack Traces | 0.024s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    @pytest.mark.skipif(
        platform.system() == "Windows" and platform.python_version().startswith("3.8"),
        reason="Random SEGFAULT in the GitHub pipeline for 3.8 on Windows",
    )
    def test_workflow(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Workflow(server=server_type_remote_process)
        f.progress_bar = False
>       op = dpf.Operator("custom_forward_workflow", server=server_type_remote_process)

tests\test_python_plugins.py:218: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_data_sources[ansys-grpc-dpf]
Stack Traces | 0.025s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_data_sources(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.DataSources("file.rst", server=server_type_remote_process)
>       op = dpf.Operator("custom_forward_data_sources", server=server_type_remote_process)

tests\test_python_plugins.py:205: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

    (same Operator.__init__ frame as in the first failure above)
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_syntax_error[ansys-grpc-dpf]
Stack Traces | 0.025s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    @conftest.raises_for_servers_version_under("4.0")
    def test_syntax_error(server_type_remote_process, testfiles_dir):
        dpf.load_library(
            dpf.path_utilities.to_server_os(
                Path(testfiles_dir) / "pythonPlugins" / "syntax_error_plugin",
                server_type_remote_process,
            ),
            "py_raising",
            "load_operators",
            server=server_type_remote_process,
        )
>       op = dpf.Operator("raising", server=server_type_remote_process)

tests\test_python_plugins.py:257: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0887CEB60>
name = 'raising', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_meshes_container[ansys-grpc-dpf]
Stack Traces | 0.026s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_meshes_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.MeshedRegion(server=server_type_remote_process)
        sc = dpf.MeshesContainer(server=server_type_remote_process)
        sc.add_mesh({}, f)
>       op = dpf.Operator("custom_forward_meshes_container", server=server_type_remote_process)

tests\test_python_plugins.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0804EC130>
name = 'custom_forward_meshes_container', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_property_field[ansys-grpc-dpf]
Stack Traces | 0.026s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_property_field(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.PropertyField(server=server_type_remote_process)
        f.data = np.ones((9), dtype=np.int32)
>       op = dpf.Operator("custom_forward_property_field", server=server_type_remote_process)

tests\test_python_plugins.py:127: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D080406740>
name = 'custom_forward_property_field', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_data_tree[ansys-grpc-dpf]
Stack Traces | 0.027s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_data_tree(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.DataTree(server=server_type_remote_process)
        f.add(name="Paul")
>       op = dpf.Operator("custom_forward_data_tree", server=server_type_remote_process)

tests\test_python_plugins.py:227: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08468EA40>
name = 'custom_forward_data_tree', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_data_sources[gRPC CLayer]
Stack Traces | 0.028s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_data_sources(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.DataSources("file.rst", server=server_type_remote_process)
>       op = dpf.Operator("custom_forward_data_sources", server=server_type_remote_process)

tests\test_python_plugins.py:205: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0803E9510>
name = 'custom_forward_data_sources', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_data_tree[gRPC CLayer]
Stack Traces | 0.028s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_data_tree(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.DataTree(server=server_type_remote_process)
        f.add(name="Paul")
>       op = dpf.Operator("custom_forward_data_tree", server=server_type_remote_process)

tests\test_python_plugins.py:227: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D087F33700>
name = 'custom_forward_data_tree', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_scopings_container[ansys-grpc-dpf]
Stack Traces | 0.028s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_scopings_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Scoping(location="Elemental", server=server_type_remote_process)
        sc = dpf.ScopingsContainer(server=server_type_remote_process)
        sc.add_scoping({}, f)
>       op = dpf.Operator("custom_forward_scopings_container", server=server_type_remote_process)

tests\test_python_plugins.py:187: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0809C19F0>
name = 'custom_forward_scopings_container', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_scopings_container[gRPC CLayer]
Stack Traces | 0.028s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_scopings_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.Scoping(location="Elemental", server=server_type_remote_process)
        sc = dpf.ScopingsContainer(server=server_type_remote_process)
        sc.add_scoping({}, f)
>       op = dpf.Operator("custom_forward_scopings_container", server=server_type_remote_process)

tests\test_python_plugins.py:187: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0808D4A30>
name = 'custom_forward_scopings_container', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_field[ansys-grpc-dpf]
Stack Traces | 0.029s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_field(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.fields_factory.create_3d_vector_field(3, "Elemental", server=server_type_remote_process)
        f.data = np.ones((3, 3), dtype=np.float64)
>       op = dpf.Operator("custom_forward_field", server=server_type_remote_process)

tests\test_python_plugins.py:117: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D085314B50>
name = 'custom_forward_field', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_fields_container[gRPC CLayer]
Stack Traces | 0.029s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_fields_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.fields_factory.create_3d_vector_field(3, "Elemental", server=server_type_remote_process)
        f.data = np.ones((3, 3), dtype=np.float64)
        fc = dpf.fields_container_factory.over_time_freq_fields_container(
            [f], server=server_type_remote_process
        )
>       op = dpf.Operator("custom_forward_fields_container", server=server_type_remote_process)

tests\test_python_plugins.py:173: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08874FBB0>
name = 'custom_forward_fields_container', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_meshes_container[gRPC CLayer]
Stack Traces | 0.029s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_meshes_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.MeshedRegion(server=server_type_remote_process)
        sc = dpf.MeshesContainer(server=server_type_remote_process)
        sc.add_mesh({}, f)
>       op = dpf.Operator("custom_forward_meshes_container", server=server_type_remote_process)

tests\test_python_plugins.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D08019B640>
name = 'custom_forward_meshes_container', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_python_plugins.py::test_python_plugins::test_fields_container[ansys-grpc-dpf]
Stack Traces | 0.034s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_fields_container(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
        f = dpf.fields_factory.create_3d_vector_field(3, "Elemental", server=server_type_remote_process)
        f.data = np.ones((3, 3), dtype=np.float64)
        fc = dpf.fields_container_factory.over_time_freq_fields_container(
            [f], server=server_type_remote_process
        )
>       op = dpf.Operator("custom_forward_fields_container", server=server_type_remote_process)

tests\test_python_plugins.py:173: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0887CD0F0>
name = 'custom_forward_fields_container', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    [Operator.__init__ listing identical to the first failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204 with
    TypeError: Inputs.__init__() missing 1 required positional argument: 'operator']
tests\test_meshregion.py::test_meshregion::test_mesh_deep_copy2[in Process CLayer]
Stack Traces | 0.041s run time
simple_bar_model = <ansys.dpf.core.model.Model object at 0x000002D08108A170>
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    @pytest.mark.slow
    def test_mesh_deep_copy2(simple_bar_model, server_type):
        mesh = simple_bar_model.metadata.meshed_region
>       copy = mesh.deep_copy()

tests\test_meshregion.py:620: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox\test-api\lib\site-packages\ansys\dpf\core\meshed_region.py:665: in deep_copy
    return _deep_copy(self, server=server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

dpf_entity = <ansys.dpf.core.meshed_region.MeshedRegion object at 0x000002D08108A890>
server = None

    def _deep_copy(dpf_entity, server=None):
        """Return a copy of the entity in the requested server.
    
        Parameters
        ----------
        dpf_entity: core.Operator, core.Workflow, core.Scoping,
                    core.Field, core.FieldsContainer, core.MeshedRegion...
            Dpf entity to deep_copy
    
        server : server.DPFServer, optional
            Server with channel connected to the remote or local instance. When
            ``None``, attempts to use the global server.
    
        Returns
        -------
           deep_copy of dpf_entity: core.Operator, core.Workflow, core.Scoping,
                                    core.Field, core.FieldsContainer, core.MeshedRegion...
        """
        from ansys.dpf.core.common import types, types_enum_to_types
        from ansys.dpf.core.operators.serialization import serializer_to_string, string_deserializer
    
        entity_server = dpf_entity._server if hasattr(dpf_entity, "_server") else None
        serializer = serializer_to_string(server=entity_server)
        serializer.connect(1, dpf_entity)
        deserializer = string_deserializer(server=server)
        stream_type = 1 if server_meet_version("8.0", serializer._server) else 0
        serializer.connect(-1, stream_type)
        if stream_type == 1:
            out = serializer.get_output(0, types.bytes)
        else:
>           out = serializer.outputs.serialized_string  # Required for retro with 241
E           AttributeError: 'OutputsSerializerToString' object has no attribute 'serialized_string'. Did you mean: 'serialized_string1'?

.tox\test-api\lib\site-packages\ansys\dpf\core\core.py:314: AttributeError
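The four `deep_copy` failures are a separate regression: on the retro-compatibility path for pre-8.0 servers (`stream_type == 0`), `_deep_copy` reads `serializer.outputs.serialized_string`, but the regenerated `OutputsSerializerToString` apparently exposes only `serialized_string1`, as the interpreter's "Did you mean" hint shows. A hedged sketch of a tolerant read — the candidate names come straight from the error message; whether the renamed pin carries the same data is an assumption:

```python
def _read_serialized_output(serializer):
    """Return the legacy string output, trying the old pin name first.

    Sketch only: assumes the regenerated operator kept the same payload on a
    renamed attribute ('serialized_string' -> 'serialized_string1').
    """
    outputs = serializer.outputs
    for attr in ("serialized_string", "serialized_string1"):
        if hasattr(outputs, attr):
            return getattr(outputs, attr)
    raise AttributeError("serializer_to_string has no serialized string output")
```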
tests\test_meshregion.py::test_meshregion::test_mesh_deep_copy2[gRPC CLayer]
Stack Traces | 0.06s run time
simple_bar_model = <ansys.dpf.core.model.Model object at 0x000002D087FA4C70>
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    @pytest.mark.slow
    def test_mesh_deep_copy2(simple_bar_model, server_type):
        mesh = simple_bar_model.metadata.meshed_region
>       copy = mesh.deep_copy()

tests\test_meshregion.py:620: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox\test-api\lib\site-packages\ansys\dpf\core\meshed_region.py:665: in deep_copy
    return _deep_copy(self, server=server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

dpf_entity = <ansys.dpf.core.meshed_region.MeshedRegion object at 0x000002D087FA7F70>
server = None

    [_deep_copy listing identical to the first deep_copy failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\core.py:314 with
    AttributeError: 'OutputsSerializerToString' object has no attribute 'serialized_string']
tests\test_meshregion.py::test_meshregion::test_mesh_deep_copy[in Process CLayer]
Stack Traces | 0.082s run time
allkindofcomplexity = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\testing\\allKindOfComplexity.rst'
server_type = <ansys.dpf.core.server_types.InProcessServer object at 0x000002D0ED46BAC0>

    @pytest.mark.slow
    def test_mesh_deep_copy(allkindofcomplexity, server_type):
        # Small mesh
        model = dpf.core.Model(allkindofcomplexity, server=server_type)
        mesh = model.metadata.meshed_region
>       copy = mesh.deep_copy()

tests\test_meshregion.py:580: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox\test-api\lib\site-packages\ansys\dpf\core\meshed_region.py:665: in deep_copy
    return _deep_copy(self, server=server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

dpf_entity = <ansys.dpf.core.meshed_region.MeshedRegion object at 0x000002D0804929B0>
server = None

    [_deep_copy listing identical to the first deep_copy failure above; fails at
    .tox\test-api\lib\site-packages\ansys\dpf\core\core.py:314 with
    AttributeError: 'OutputsSerializerToString' object has no attribute 'serialized_string']
tests\test_meshregion.py::test_meshregion::test_mesh_deep_copy[gRPC CLayer]
Stack Traces | 0.12s run time
allkindofcomplexity = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\testing\\allKindOfComplexity.rst'
server_type = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D08019C970>

    @pytest.mark.slow
    def test_mesh_deep_copy(allkindofcomplexity, server_type):
        # Small mesh
        model = dpf.core.Model(allkindofcomplexity, server=server_type)
        mesh = model.metadata.meshed_region
>       copy = mesh.deep_copy()

tests\test_meshregion.py:580: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox\test-api\lib\site-packages\ansys\dpf\core\meshed_region.py:665: in deep_copy
    return _deep_copy(self, server=server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

dpf_entity = <ansys.dpf.core.meshed_region.MeshedRegion object at 0x000002D085309870>
server = None

    def _deep_copy(dpf_entity, server=None):
        """Return a copy of the entity in the requested server.
    
        Parameters
        ----------
        dpf_entity: core.Operator, core.Workflow, core.Scoping,
                    core.Field, core.FieldsContainer, core.MeshedRegion...
            Dpf entity to deep_copy
    
        server : server.DPFServer, optional
            Server with channel connected to the remote or local instance. When
            ``None``, attempts to use the global server.
    
        Returns
        -------
           deep_copy of dpf_entity: core.Operator, core.Workflow, core.Scoping,
                                    core.Field, core.FieldsContainer, core.MeshedRegion...
        """
        from ansys.dpf.core.common import types, types_enum_to_types
        from ansys.dpf.core.operators.serialization import serializer_to_string, string_deserializer
    
        entity_server = dpf_entity._server if hasattr(dpf_entity, "_server") else None
        serializer = serializer_to_string(server=entity_server)
        serializer.connect(1, dpf_entity)
        deserializer = string_deserializer(server=server)
        stream_type = 1 if server_meet_version("8.0", serializer._server) else 0
        serializer.connect(-1, stream_type)
        if stream_type == 1:
            out = serializer.get_output(0, types.bytes)
        else:
>           out = serializer.outputs.serialized_string  # Required for retro with 241
E           AttributeError: 'OutputsSerializerToString' object has no attribute 'serialized_string'. Did you mean: 'serialized_string1'?

.tox\test-api\lib\site-packages\ansys\dpf\core\core.py:314: AttributeError
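
Both deep-copy failures above share one root cause: the regenerated OutputsSerializerToString class renamed its pin-0 output from serialized_string to serialized_string1, so the retro-compatibility branch for pre-8.0 (241) servers no longer finds the attribute it expects. A minimal defensive sketch of that branch, tolerating either generated name (the helper name is illustrative, not the fix that was merged here):

    from ansys.dpf.core.common import types

    def get_serialized_entity(serializer, stream_type):
        """Return the serialized form of the entity from serializer_to_string.

        Hypothetical helper: it accepts both names the code generator has
        used for the pin-0 string output.
        """
        if stream_type == 1:
            # 8.0+ servers stream the entity as raw bytes.
            return serializer.get_output(0, types.bytes)
        # Pre-8.0 (241) servers: older generated code exposed the pin-0
        # output as `serialized_string`, the regenerated code as
        # `serialized_string1`.
        out = getattr(serializer.outputs, "serialized_string", None)
        if out is None:
            out = serializer.outputs.serialized_string1
        return out

Fetching pin 0 by index with serializer.get_output(0, types.string) would sidestep the generated attribute name entirely, at the cost of bypassing the typed outputs wrapper.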
tests/test_mesh_info.py::test_output_mesh_info_provider_fluent[in Process CLayer]
Stack Traces | 0.199s run time
server_clayer = <ansys.dpf.core.server_types.InProcessServer object at 0x7f510cddbd90>

    @pytest.mark.skipif(
        not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Available for servers >=7.0"
    )
    def test_output_mesh_info_provider_fluent(server_clayer):
        ds = dpf.DataSources(server=server_clayer)
        files = examples.download_fluent_multi_species()
        ds.set_result_file_path(files["cas"], "cas")
    
        mesh_info = dpf.operators.metadata.mesh_info_provider(server=server_clayer)
        mesh_info.connect(4, ds)
>       mesh_info_out = mesh_info.outputs.mesh_info()

tests/test_mesh_info.py:182: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
..../test-api/lib/python3.10.../dpf/core/outputs.py:106: in __call__
    return self.get_data()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ansys.dpf.core.outputs.Output object at 0x7f510cd97220>

    def get_data(self) -> T:
        """Retrieve the output of the operator."""
        type_output = self._spec.type_names[0]
    
        if type_output == "abstract_meshed_region":
            type_output = types.meshed_region
        elif type_output == "abstract_data_tree":
            type_output = types.data_tree
        elif type_output == "fields_container":
            type_output = types.fields_container
        elif type_output == "scopings_container":
            type_output = types.scopings_container
        elif type_output == "meshes_container":
            type_output = types.meshes_container
        elif type_output == "streams_container":
            type_output = types.streams_container
        elif type_output == "vector<double>":
            type_output = types.vec_double
        elif type_output == "vector<int32>":
            type_output = types.vec_int
        elif type_output == "int32":
            type_output = types.int
    
        output = self._operator.get_output(self._pin, type_output)
    
        type_output_derive_class = self._spec.name_derived_class
        if type_output_derive_class == "":
            return output
    
        from ansys.dpf.core.common import derived_class_name_to_type
    
>       derived_type = derived_class_name_to_type().get(type_output_derive_class)
E       TypeError: unhashable type: 'list'

..../test-api/lib/python3.10.../dpf/core/outputs.py:93: TypeError
tests/test_mesh_info.py::test_output_mesh_info_provider_fluent[gRPC CLayer]
Stack Traces | 0.512s run time
server_clayer = <ansys.dpf.core.server_types.GrpcServer object at 0x7f5144e17b20>

    @pytest.mark.skipif(
        not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_7_0, reason="Available for servers >=7.0"
    )
    def test_output_mesh_info_provider_fluent(server_clayer):
        ds = dpf.DataSources(server=server_clayer)
        files = examples.download_fluent_multi_species()
        ds.set_result_file_path(files["cas"], "cas")
    
        mesh_info = dpf.operators.metadata.mesh_info_provider(server=server_clayer)
        mesh_info.connect(4, ds)
>       mesh_info_out = mesh_info.outputs.mesh_info()

tests/test_mesh_info.py:182: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
..../test-api/lib/python3.10.../dpf/core/outputs.py:106: in __call__
    return self.get_data()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ansys.dpf.core.outputs.Output object at 0x7f510cdd8ca0>

    def get_data(self) -> T:
        """Retrieve the output of the operator."""
        type_output = self._spec.type_names[0]
    
        if type_output == "abstract_meshed_region":
            type_output = types.meshed_region
        elif type_output == "abstract_data_tree":
            type_output = types.data_tree
        elif type_output == "fields_container":
            type_output = types.fields_container
        elif type_output == "scopings_container":
            type_output = types.scopings_container
        elif type_output == "meshes_container":
            type_output = types.meshes_container
        elif type_output == "streams_container":
            type_output = types.streams_container
        elif type_output == "vector<double>":
            type_output = types.vec_double
        elif type_output == "vector<int32>":
            type_output = types.vec_int
        elif type_output == "int32":
            type_output = types.int
    
        output = self._operator.get_output(self._pin, type_output)
    
        type_output_derive_class = self._spec.name_derived_class
        if type_output_derive_class == "":
            return output
    
        from ansys.dpf.core.common import derived_class_name_to_type
    
>       derived_type = derived_class_name_to_type().get(type_output_derive_class)
E       TypeError: unhashable type: 'list'

..../test-api/lib/python3.10.../dpf/core/outputs.py:93: TypeError
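
The two mesh_info_provider failures point at a different regression: the operator specification now reports name_derived_class as a list (one entry per type name) rather than a plain string, and a list is not hashable, so it cannot be used as a key into the dict returned by derived_class_name_to_type(). A tolerant lookup could look like the sketch below, under the assumption that at most one derived class is meaningful per output pin (the helper name is illustrative):

    def resolve_derived_type(name_derived_class, derived_map):
        """Map a derived-class name to a Python type, accepting str or list.

        Hypothetical guard for a spec that hands back either form.
        """
        if isinstance(name_derived_class, (list, tuple)):
            # Assumption: the first non-empty entry is the relevant derived
            # class; an all-empty list means there is none.
            name_derived_class = next((n for n in name_derived_class if n), "")
        if not name_derived_class:
            return None
        return derived_map.get(name_derived_class)

In get_data, this would stand in for the direct derived_class_name_to_type().get(type_output_derive_class) call shown in the trace.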
tests\test_field.py::test_field::test_deep_copy_elemental_nodal_field
Stack Traces | 0.877s run time
allkindofcomplexity = 'D:\\a\\pydpf-core\\pydpf-core\\.tox\\test-api\\lib\\site-packages\\ansys\\dpf\\core\\examples\\result_files\\testing\\allKindOfComplexity.rst'

    @pytest.mark.slow
    def test_deep_copy_elemental_nodal_field(allkindofcomplexity):
        model = dpf.core.Model(allkindofcomplexity)
        stress = model.results.stress()
        field = stress.outputs.fields_container()[0]
        copy = field.deep_copy()
        iden = dpf.core.operators.logic.identical_fields(field, copy)
    
        try:
            assert iden.outputs.boolean()
        except AssertionError as e:
            print(iden.outputs.message())
            raise e
    
        mesh = field.meshed_region
        copy = copy.meshed_region
>       assert np.allclose(copy.nodes.scoping.ids, mesh.nodes.scoping.ids)

tests\test_field.py:1227: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.tox\test-api\lib\site-packages\numpy\_core\numeric.py:2329: in allclose
    res = all(isclose(a, b, rtol=rtol, atol=atol, equal_nan=equal_nan))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

a = DPFArray([], dtype=int32)
b = DPFArray([    1,     2,     3, ..., 15127, 15128, 15129],
         shape=(15129,), dtype=int32)
rtol = 1e-05, atol = 1e-08, equal_nan = False

    @array_function_dispatch(_isclose_dispatcher)
    def isclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False):
        """
        Returns a boolean array where two arrays are element-wise equal within a
        tolerance.
    
        The tolerance values are positive, typically very small numbers.  The
        relative difference (`rtol` * abs(`b`)) and the absolute difference
        `atol` are added together to compare against the absolute difference
        between `a` and `b`.
    
        .. warning:: The default `atol` is not appropriate for comparing numbers
                     with magnitudes much smaller than one (see Notes).
    
        Parameters
        ----------
        a, b : array_like
            Input arrays to compare.
        rtol : array_like
            The relative tolerance parameter (see Notes).
        atol : array_like
            The absolute tolerance parameter (see Notes).
        equal_nan : bool
            Whether to compare NaN's as equal.  If True, NaN's in `a` will be
            considered equal to NaN's in `b` in the output array.
    
        Returns
        -------
        y : array_like
            Returns a boolean array of where `a` and `b` are equal within the
            given tolerance. If both `a` and `b` are scalars, returns a single
            boolean value.
    
        See Also
        --------
        allclose
        math.isclose
    
        Notes
        -----
        For finite values, isclose uses the following equation to test whether
        two floating point values are equivalent.::
    
         absolute(a - b) <= (atol + rtol * absolute(b))
    
        Unlike the built-in `math.isclose`, the above equation is not symmetric
        in `a` and `b` -- it assumes `b` is the reference value -- so that
        `isclose(a, b)` might be different from `isclose(b, a)`.
    
        The default value of `atol` is not appropriate when the reference value
        `b` has magnitude smaller than one. For example, it is unlikely that
        ``a = 1e-9`` and ``b = 2e-9`` should be considered "close", yet
        ``isclose(1e-9, 2e-9)`` is ``True`` with default settings. Be sure
        to select `atol` for the use case at hand, especially for defining the
        threshold below which a non-zero value in `a` will be considered "close"
        to a very small or zero value in `b`.
    
        `isclose` is not defined for non-numeric data types.
        :class:`bool` is considered a numeric data-type for this purpose.
    
        Examples
        --------
        >>> import numpy as np
        >>> np.isclose([1e10,1e-7], [1.00001e10,1e-8])
        array([ True, False])
    
        >>> np.isclose([1e10,1e-8], [1.00001e10,1e-9])
        array([ True, True])
    
        >>> np.isclose([1e10,1e-8], [1.0001e10,1e-9])
        array([False,  True])
    
        >>> np.isclose([1.0, np.nan], [1.0, np.nan])
        array([ True, False])
    
        >>> np.isclose([1.0, np.nan], [1.0, np.nan], equal_nan=True)
        array([ True, True])
    
        >>> np.isclose([1e-8, 1e-7], [0.0, 0.0])
        array([ True, False])
    
        >>> np.isclose([1e-100, 1e-7], [0.0, 0.0], atol=0.0)
        array([False, False])
    
        >>> np.isclose([1e-10, 1e-10], [1e-20, 0.0])
        array([ True,  True])
    
        >>> np.isclose([1e-10, 1e-10], [1e-20, 0.999999e-10], atol=0.0)
        array([False,  True])
    
        """
        # Turn all but python scalars into arrays.
        x, y, atol, rtol = (
            a if isinstance(a, (int, float, complex)) else asanyarray(a)
            for a in (a, b, atol, rtol))
    
        # Make sure y is an inexact type to avoid bad behavior on abs(MIN_INT).
        # This will cause casting of x later. Also, make sure to allow subclasses
        # (e.g., for numpy.ma).
        # NOTE: We explicitly allow timedelta, which used to work. This could
        #       possibly be deprecated. See also gh-18286.
        #       timedelta works if `atol` is an integer or also a timedelta.
        #       Although, the default tolerances are unlikely to be useful
        if (dtype := getattr(y, "dtype", None)) is not None and dtype.kind != "m":
            dt = multiarray.result_type(y, 1.)
            y = asanyarray(y, dtype=dt)
        elif isinstance(y, int):
            y = float(y)
    
        with errstate(invalid='ignore'):
>           result = (less_equal(abs(x-y), atol + rtol * abs(y))
                      & isfinite(y)
                      | (x == y))
E           ValueError: operands could not be broadcast together with shapes (0,) (15129,)

.tox\test-api\lib\site-packages\numpy\_core\numeric.py:2447: ValueError
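
The test_deep_copy_elemental_nodal_field failure is a downstream symptom of the serializer regression: the deep-copied mesh comes back with an empty node scoping (DPFArray([], dtype=int32)), so np.allclose dies on a shape mismatch instead of reporting a value difference. A shape-checked comparison would surface the real problem, and integer ids call for exact equality rather than a floating-point tolerance (the helper name is illustrative):

    import numpy as np

    def assert_same_node_ids(copied_mesh, original_mesh):
        """Compare node scoping ids with an explicit size check first."""
        copied_ids = np.asarray(copied_mesh.nodes.scoping.ids)
        original_ids = np.asarray(original_mesh.nodes.scoping.ids)
        # Fail with a readable message instead of an opaque broadcast
        # ValueError when the deep copy comes back empty.
        assert copied_ids.shape == original_ids.shape, (
            f"deep_copy returned {copied_ids.size} node ids, "
            f"expected {original_ids.size}"
        )
        assert np.array_equal(copied_ids, original_ids)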
tests\test_python_plugins.py::test_python_plugins::test_integral_types[ansys-grpc-dpf]
Stack Traces | 3.07s run time
server_type_remote_process = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_integral_types(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
>       op = dpf.Operator("custom_forward_int", server=server_type_remote_process)

tests\test_python_plugins.py:83: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0810892A0>
name = 'custom_forward_int', config = None
server = <ansys.dpf.core.server_types.LegacyGrpcServer object at 0x000002D0804F76D0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    def __init__(
        self,
        name: str = None,
        config: Config = None,
        server: AnyServerType = None,
        operator: Operator | int = None,
        inputs_type: type[_Inputs] = Inputs,
        outputs_type: type[_Outputs] = Outputs,
    ):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._inputs_class = inputs_type
        self._outputs_class = outputs_type
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
            raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
    
        self._spec = Specification(operator_name=self.name, server=self._server)
        # add dynamic inputs if no specific Inputs subclass is used
        if len(self._spec.inputs) > 0 and self._inputs_class == Inputs:
            self._inputs = self._inputs_class(self._spec.inputs, self)
        else:
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
tests\test_python_plugins.py::test_python_plugins::test_integral_types[gRPC CLayer]
Stack Traces | 3.11s run time
server_type_remote_process = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
testfiles_dir = 'D:\\a\\pydpf-core\\pydpf-core\\tests\\testfiles'

    def test_integral_types(server_type_remote_process, testfiles_dir):
        load_all_types_plugin_with_serv(server_type_remote_process, testfiles_dir)
>       op = dpf.Operator("custom_forward_int", server=server_type_remote_process)

tests\test_python_plugins.py:83: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <ansys.dpf.core.dpf_operator.Operator object at 0x000002D0808B4BB0>
name = 'custom_forward_int', config = None
server = <ansys.dpf.core.server_types.GrpcServer object at 0x000002D0ED614DC0>
operator = None, inputs_type = <class 'ansys.dpf.core.inputs.Inputs'>
outputs_type = <class 'ansys.dpf.core.outputs.Outputs'>

    def __init__(
        self,
        name: str = None,
        config: Config = None,
        server: AnyServerType = None,
        operator: Operator | int = None,
        inputs_type: type[_Inputs] = Inputs,
        outputs_type: type[_Outputs] = Outputs,
    ):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._inputs_class = inputs_type
        self._outputs_class = outputs_type
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
            raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
    
        self._spec = Specification(operator_name=self.name, server=self._server)
        # add dynamic inputs if no specific Inputs subclass is used
        if len(self._spec.inputs) > 0 and self._inputs_class == Inputs:
            self._inputs = self._inputs_class(self._spec.inputs, self)
        else:
>           self._inputs = self._inputs_class(self)
E           TypeError: Inputs.__init__() missing 1 required positional argument: 'operator'

.tox\test-api\lib\site-packages\ansys\dpf\core\dpf_operator.py:204: TypeError
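
Both custom_forward_int failures trip on the branch selection in the regenerated Operator.__init__: a custom plugin operator whose specification declares no inputs falls through to self._inputs_class(self), but the dynamic Inputs base class takes two positional arguments (dict_inputs, operator); only the generated per-operator subclasses take the single-argument form. A sketch that branches on the class rather than on the number of declared inputs, assuming Inputs tolerates an empty spec dict (the helper is hypothetical, extracted here for readability):

    from ansys.dpf.core.inputs import Inputs

    def build_inputs(operator, spec_inputs, inputs_class=Inputs):
        """Construct the inputs holder for an operator.

        Branching on the class, not on len(spec_inputs), keeps zero-input
        plugin operators such as custom_forward_int on the two-argument
        path that Inputs.__init__ requires.
        """
        if inputs_class is Inputs:
            return inputs_class(spec_inputs, operator)
        return inputs_class(operator)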


@PProfizi merged commit b033406 into fix/fix_generated_operators_outputs_class Nov 17, 2025
24 of 43 checks passed
@PProfizi deleted the maint/update_code_for_261_on_fix/fix_generated_operators_outputs_class branch November 17, 2025 13:40