diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 50fe891d2be..976156c5e3e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -125,6 +125,7 @@ jobs: with: ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_DEFAULT }} standalone_suffix: ${{needs.pick_server_suffix.outputs.suffix}} + test_any: true secrets: inherit docker_examples: @@ -135,6 +136,7 @@ jobs: ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_DEFAULT }} python_versions: '["3.10"]' standalone_suffix: ${{needs.pick_server_suffix.outputs.suffix}} + test_any: true secrets: inherit docs: diff --git a/.github/workflows/ci_release.yml b/.github/workflows/ci_release.yml index af4e2e266ad..f3c0fe6cafa 100644 --- a/.github/workflows/ci_release.yml +++ b/.github/workflows/ci_release.yml @@ -168,6 +168,7 @@ jobs: with: ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_LAST_RELEASED }} standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }} + test_any: true secrets: inherit docker_examples: @@ -177,6 +178,7 @@ jobs: ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_LAST_RELEASED }} python_versions: '["3.10", "3.11", "3.12", "3.13"]' standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }} + test_any: true secrets: inherit draft_release: diff --git a/.github/workflows/examples_docker.yml b/.github/workflows/examples_docker.yml index 88f442270e6..fb937b0f4c2 100644 --- a/.github/workflows/examples_docker.yml +++ b/.github/workflows/examples_docker.yml @@ -16,6 +16,11 @@ on: required: false type: string default: '' + test_any: + description: "Test the platform-independent ('any') version of the wheel" + required: false + type: string + default: 'false' # Can be called manually workflow_dispatch: inputs: @@ -33,6 +38,11 @@ on: required: false type: string default: '' + test_any: + description: "Test the platform-independent ('any') version of the wheel" + required: false + type: string + default: 'false' env: PACKAGE_NAME: ansys-dpf-core @@ 
-76,7 +86,9 @@ jobs: - name: "Build the wheel" shell: bash run: | - if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + if [ ${{ inputs.test_any }} == 'true' ]; then + export platform="any" + elif [ ${{ matrix.os }} == "ubuntu-latest" ]; then export platform="manylinux_2_17" else export platform="win" diff --git a/.github/workflows/test_docker.yml b/.github/workflows/test_docker.yml index 6470e81fe8a..6323ab74726 100644 --- a/.github/workflows/test_docker.yml +++ b/.github/workflows/test_docker.yml @@ -12,6 +12,11 @@ on: ANSYS_VERSION: required: false type: string + test_any: + description: "Test the platform-independent ('any') version of the wheel" + required: false + type: string + default: 'false' # Can be called manually workflow_dispatch: inputs: @@ -24,6 +29,11 @@ on: description: "ANSYS version to run." required: false type: string + test_any: + description: "Test the platform-independent ('any') version of the wheel" + required: false + type: string + default: 'false' env: PACKAGE_NAME: ansys-dpf-core @@ -58,7 +68,9 @@ jobs: - name: "Build the wheel" shell: bash run: | - if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + if [ ${{ inputs.test_any }} == 'true' ]; then + export platform="any" + elif [ ${{ matrix.os }} == "ubuntu-latest" ]; then export platform="manylinux_2_17" else export platform="win" diff --git a/src/ansys/dpf/core/meshed_region.py b/src/ansys/dpf/core/meshed_region.py index add3e4f95d4..4bc4411aa42 100644 --- a/src/ansys/dpf/core/meshed_region.py +++ b/src/ansys/dpf/core/meshed_region.py @@ -639,7 +639,8 @@ def deep_copy(self, server=None): >>> deep_copy = meshed_region.deep_copy(server=other_server) """ - if self._server.config.legacy: + server = server_module.get_or_create_server(server) + if self._server.config.legacy or server.config.legacy: if self.nodes.scoping is None: # empty Mesh return MeshedRegion() node_ids = self.nodes.scoping.ids diff --git a/tests/conftest.py b/tests/conftest.py index f241ee23aa4..d4a69b390ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -52,6 
+52,9 @@ running_docker = ansys.dpf.core.server_types.RUNNING_DOCKER.use_docker local_test_repo = False +# Detect if gatebin binaries are available +IS_USING_GATEBIN = _try_use_gatebin() + def _get_test_files_directory(): if local_test_repo is False: @@ -72,6 +75,10 @@ def _get_test_files_directory(): ) +# Start a first global server to test for version +global_server = core.start_local_server(config=core.AvailableServerConfigs.LegacyGrpcServer) + + @pytest.hookimpl() def pytest_sessionfinish(session, exitstatus): if os.name == "posix": @@ -376,9 +383,6 @@ def return_ds(server=None): ) -IS_USING_GATEBIN = _try_use_gatebin() - - def raises_for_servers_version_under(version): """Launch the test normally if the server version is equal or higher than the "version" parameter. Else it makes sure that the test fails by raising a "DpfVersionNotSupported" @@ -419,8 +423,11 @@ def remove_none_available_config(configs, config_names): configs_out.append(conf) config_names_out.append(conf_name) elif running_docker: + unavailable_configs = [core.AvailableServerConfigs.InProcessServer] + if not IS_USING_GATEBIN: + unavailable_configs.append(core.AvailableServerConfigs.GrpcServer) for conf, conf_name in zip(configs, config_names): - if conf != core.AvailableServerConfigs.InProcessServer: + if conf not in unavailable_configs: configs_out.append(conf) config_names_out.append(conf_name) diff --git a/tests/slow/test_remoteworkflow.py b/tests/slow/test_remoteworkflow.py index 357cddf6c8f..10525f986f7 100644 --- a/tests/slow/test_remoteworkflow.py +++ b/tests/slow/test_remoteworkflow.py @@ -420,10 +420,13 @@ def test_multi_process_transparent_api_remote_workflow(): workflows.append(wf) - local_wf = core.Workflow() + # Make sure to reuse the same type of remote server as for the previous ones: + # Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server + merge_server = local_servers[len(files)] + local_wf = core.Workflow(server=merge_server) 
local_wf.progress_bar = False - merge = ops.utility.merge_fields_containers() - min_max = ops.min_max.min_max_fc(merge) + merge = ops.utility.merge_fields_containers(server=merge_server) + min_max = ops.min_max.min_max_fc(merge, server=merge_server) local_wf.add_operator(merge) local_wf.add_operator(min_max) local_wf.set_output_name("tot_output", min_max.outputs.field_max) @@ -454,10 +457,13 @@ def test_multi_process_with_names_transparent_api_remote_workflow(): workflows.append(wf) - local_wf = core.Workflow() + # Make sure to reuse the same type of remote server as for the previous ones: + # Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server + merge_server = local_servers[len(files)] + local_wf = core.Workflow(server=merge_server) local_wf.progress_bar = False - merge = ops.utility.merge_fields_containers() - min_max = ops.min_max.min_max_fc(merge) + merge = ops.utility.merge_fields_containers(server=merge_server) + min_max = ops.min_max.min_max_fc(merge, server=merge_server) local_wf.add_operator(merge) local_wf.add_operator(min_max) local_wf.set_output_name("tot_output", min_max.outputs.field_max) @@ -546,25 +552,28 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow(): ) def test_multi_process_transparent_api_create_on_local_remote_workflow(): files = examples.download_distributed_files() - wf = core.Workflow() + # Make sure to reuse the same type of remote server as for the previous ones: + # Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server + merge_server = local_servers[len(files)] + wf = core.Workflow(server=merge_server) wf.progress_bar = False - op = ops.result.displacement() - average = core.operators.math.norm_fc(op) + op = ops.result.displacement(server=merge_server) + average = core.operators.math.norm_fc(op, server=merge_server) wf.add_operators([op, average]) wf.set_output_name("distrib", average.outputs.fields_container) 
wf.set_input_name("ds", op.inputs.data_sources) - local_wf = core.Workflow() + local_wf = core.Workflow(server=merge_server) local_wf.progress_bar = False - merge = ops.utility.merge_fields_containers() - min_max = ops.min_max.min_max_fc(merge) + merge = ops.utility.merge_fields_containers(server=merge_server) + min_max = ops.min_max.min_max_fc(merge, server=merge_server) local_wf.add_operator(merge) local_wf.add_operator(min_max) local_wf.set_output_name("tot_output", min_max.outputs.field_max) for i in files: - data_sources1 = core.DataSources(files[i]) + data_sources1 = core.DataSources(files[i], server=merge_server) remote_wf = wf.create_on_other_server(server=local_servers[i]) remote_wf.connect("ds", data_sources1) local_wf.set_input_name("distrib" + str(i), merge, i) diff --git a/tests/test_python_plugins.py b/tests/test_python_plugins.py index 3abfe1a33c2..01c4ccd2da9 100644 --- a/tests/test_python_plugins.py +++ b/tests/test_python_plugins.py @@ -20,7 +20,6 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import os from pathlib import Path import platform @@ -37,6 +36,7 @@ PinSpecification, SpecificationProperties, ) +from ansys.dpf.core.server_factory import CommunicationProtocols import conftest from conftest import ( SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0, @@ -46,16 +46,14 @@ if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0: pytest.skip("Requires server version higher than 4.0", allow_module_level=True) -# if platform.python_version().startswith("3.7"): -# pytest.skip( -# "Known failures in the GitHub pipelines for 3.7", -# allow_module_level=True -# ) + if platform.system() == "Linux": pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True) -update_virtual_environment_for_custom_operators(restore_original=True) -update_virtual_environment_for_custom_operators() +# Updating the dpf-site.zip of a DPF Server is only available when InProcess. 
+if dpf.SERVER.config.protocol == CommunicationProtocols.InProcess: + update_virtual_environment_for_custom_operators(restore_original=True) + update_virtual_environment_for_custom_operators() @pytest.fixture(scope="module") diff --git a/tests/test_resultinfo.py b/tests/test_resultinfo.py index 449bfc385ee..123d6327240 100644 --- a/tests/test_resultinfo.py +++ b/tests/test_resultinfo.py @@ -315,7 +315,7 @@ def test_create_result_info(server_type): """ assert str(result_info) == ref with pytest.raises(ValueError, match="requires"): - _ = dpf.core.ResultInfo() + _ = dpf.core.ResultInfo(server=server_type) else: with pytest.raises(NotImplementedError, match="Cannot create a new ResultInfo via gRPC."): _ = dpf.core.ResultInfo(