2 changes: 2 additions & 0 deletions .github/workflows/ci.yml
@@ -125,6 +125,7 @@ jobs:
with:
ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_DEFAULT }}
standalone_suffix: ${{needs.pick_server_suffix.outputs.suffix}}
test_any: true
secrets: inherit

docker_examples:
@@ -135,6 +136,7 @@ jobs:
ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_DEFAULT }}
python_versions: '["3.10"]'
standalone_suffix: ${{needs.pick_server_suffix.outputs.suffix}}
test_any: true
secrets: inherit

docs:
2 changes: 2 additions & 0 deletions .github/workflows/ci_release.yml
@@ -168,6 +168,7 @@ jobs:
with:
ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_LAST_RELEASED }}
standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }}
test_any: true
secrets: inherit

docker_examples:
@@ -177,6 +178,7 @@ jobs:
ANSYS_VERSION: ${{ inputs.ansys_version || vars.ANSYS_VERSION_LAST_RELEASED }}
python_versions: '["3.10", "3.11", "3.12", "3.13"]'
standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }}
test_any: true
secrets: inherit

draft_release:
14 changes: 13 additions & 1 deletion .github/workflows/examples_docker.yml
@@ -16,6 +16,11 @@ on:
required: false
type: string
default: ''
test_any:
description: "Test the 'any' platform version of the wheel"
required: false
type: string
default: 'false'
# Can be called manually
workflow_dispatch:
inputs:
@@ -33,6 +38,11 @@ on:
required: false
type: string
default: ''
test_any:
description: "Test the 'any' platform version of the wheel"
required: false
type: string
default: 'false'

env:
PACKAGE_NAME: ansys-dpf-core
@@ -76,7 +86,9 @@ jobs:
- name: "Build the wheel"
shell: bash
run: |
if [ ${{ matrix.os }} == "ubuntu-latest" ]; then
if [ ${{ inputs.test_any }} == 'true' ]; then
export platform="any"
elif [ ${{ matrix.os }} == "ubuntu-latest" ]; then
export platform="manylinux_2_17"
else
export platform="win"
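For clarity, the platform-selection logic added to the wheel-build step (here and in test_docker.yml below) can be read as the following standalone sketch; the input values are hypothetical and the actual build command is not shown in this diff:

```bash
#!/usr/bin/env bash
# Standalone sketch of the selection added above (hypothetical input values).
test_any="true"        # stands in for ${{ inputs.test_any }}
os="ubuntu-latest"     # stands in for ${{ matrix.os }}

if [ "$test_any" == "true" ]; then
  platform="any"             # pure-Python wheel, without the platform-specific gatebin binaries
elif [ "$os" == "ubuntu-latest" ]; then
  platform="manylinux_2_17"  # Linux wheel
else
  platform="win"             # Windows wheel
fi
echo "wheel platform tag: $platform"
```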
14 changes: 13 additions & 1 deletion .github/workflows/test_docker.yml
@@ -12,6 +12,11 @@ on:
ANSYS_VERSION:
required: false
type: string
test_any:
description: "Test the 'any' platform version of the wheel"
required: false
type: string
default: 'false'
# Can be called manually
workflow_dispatch:
inputs:
@@ -24,6 +29,11 @@ on:
description: "ANSYS version to run."
required: false
type: string
test_any:
description: "Test the 'any' platform version of the wheel"
required: false
type: string
default: 'false'

env:
PACKAGE_NAME: ansys-dpf-core
@@ -58,7 +68,9 @@ jobs:
- name: "Build the wheel"
shell: bash
run: |
if [ ${{ matrix.os }} == "ubuntu-latest" ]; then
if [ ${{ inputs.test_any }} == 'true' ]; then
export platform="any"
elif [ ${{ matrix.os }} == "ubuntu-latest" ]; then
export platform="manylinux_2_17"
else
export platform="win"
3 changes: 2 additions & 1 deletion src/ansys/dpf/core/meshed_region.py
@@ -639,7 +639,8 @@ def deep_copy(self, server=None):
>>> deep_copy = meshed_region.deep_copy(server=other_server)

"""
if self._server.config.legacy:
server = server_module.get_or_create_server(server)
if self._server.config.legacy or server.config.legacy:
if self.nodes.scoping is None: # empty Mesh
return MeshedRegion()
node_ids = self.nodes.scoping.ids
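With this change, the target server passed as server= (resolved through get_or_create_server) also triggers the legacy copy path, not only the source mesh's own server. A minimal usage sketch, assuming a local DPF Server installation and the bundled example file:

```python
# Minimal sketch, assuming a local DPF Server installation; the servers started
# here are illustrative, only deep_copy(server=...) comes from the change above.
from ansys.dpf import core as dpf
from ansys.dpf.core import examples

source_server = dpf.start_local_server(as_global=False)
legacy_server = dpf.start_local_server(
    config=dpf.AvailableServerConfigs.LegacyGrpcServer, as_global=False
)

mesh = dpf.Model(examples.find_static_rst(), server=source_server).metadata.meshed_region

# The legacy code path is now used if either the source mesh's server or the
# target server is a legacy gRPC server.
copy = mesh.deep_copy(server=legacy_server)
print(copy.nodes.n_nodes)
```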
15 changes: 11 additions & 4 deletions tests/conftest.py
@@ -52,6 +52,9 @@
running_docker = ansys.dpf.core.server_types.RUNNING_DOCKER.use_docker
local_test_repo = False

# Detect if gatebin binaries are available
IS_USING_GATEBIN = _try_use_gatebin()


def _get_test_files_directory():
if local_test_repo is False:
@@ -72,6 +75,10 @@ def _get_test_files_directory():
)


# Start a first global server to test for version
global_server = core.start_local_server(config=core.AvailableServerConfigs.LegacyGrpcServer)


@pytest.hookimpl()
def pytest_sessionfinish(session, exitstatus):
if os.name == "posix":
@@ -376,9 +383,6 @@ def return_ds(server=None):
)


IS_USING_GATEBIN = _try_use_gatebin()


def raises_for_servers_version_under(version):
"""Launch the test normally if the server version is equal or higher than the "version"
parameter. Else it makes sure that the test fails by raising a "DpfVersionNotSupported"
@@ -419,8 +423,11 @@ def remove_none_available_config(configs, config_names):
configs_out.append(conf)
config_names_out.append(conf_name)
elif running_docker:
unavailable_configs = [core.AvailableServerConfigs.InProcessServer]
if not IS_USING_GATEBIN:
unavailable_configs.append(core.AvailableServerConfigs.GrpcServer)
for conf, conf_name in zip(configs, config_names):
if conf != core.AvailableServerConfigs.InProcessServer:
if conf not in unavailable_configs:
configs_out.append(conf)
config_names_out.append(conf_name)

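In the docker branch, the configuration filter now also drops GrpcServer when the gatebin binaries are not importable. Reduced to plain Python (the values are illustrative; conftest.py uses core.AvailableServerConfigs members and IS_USING_GATEBIN = _try_use_gatebin()):

```python
# Illustrative reduction of the filtering logic above; strings stand in for
# the core.AvailableServerConfigs enum members used in conftest.py.
IS_USING_GATEBIN = False  # e.g. the "any" wheel is installed, no gatebin binaries

unavailable_configs = ["InProcessServer"]
if not IS_USING_GATEBIN:
    # Without gatebin, the non-legacy gRPC client cannot be used either.
    unavailable_configs.append("GrpcServer")

configs = ["LegacyGrpcServer", "GrpcServer", "InProcessServer"]
configs_out = [conf for conf in configs if conf not in unavailable_configs]
print(configs_out)  # ['LegacyGrpcServer']
```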
35 changes: 22 additions & 13 deletions tests/slow/test_remoteworkflow.py
@@ -420,10 +420,13 @@ def test_multi_process_transparent_api_remote_workflow():

workflows.append(wf)

local_wf = core.Workflow()
# Make sure to reuse the same type of remote server as for the previous ones:
# Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server
merge_server = local_servers[len(files)]
local_wf = core.Workflow(server=merge_server)
local_wf.progress_bar = False
merge = ops.utility.merge_fields_containers()
min_max = ops.min_max.min_max_fc(merge)
merge = ops.utility.merge_fields_containers(server=merge_server)
min_max = ops.min_max.min_max_fc(merge, server=merge_server)
local_wf.add_operator(merge)
local_wf.add_operator(min_max)
local_wf.set_output_name("tot_output", min_max.outputs.field_max)
@@ -454,10 +457,13 @@ def test_multi_process_with_names_transparent_api_remote_workflow():

workflows.append(wf)

local_wf = core.Workflow()
# Make sure to reuse the same type of remote server as for the previous ones:
# Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server
merge_server = local_servers[len(files)]
local_wf = core.Workflow(server=merge_server)
local_wf.progress_bar = False
merge = ops.utility.merge_fields_containers()
min_max = ops.min_max.min_max_fc(merge)
merge = ops.utility.merge_fields_containers(server=merge_server)
min_max = ops.min_max.min_max_fc(merge, server=merge_server)
local_wf.add_operator(merge)
local_wf.add_operator(min_max)
local_wf.set_output_name("tot_output", min_max.outputs.field_max)
@@ -546,25 +552,28 @@ def test_multi_process_transparent_api_connect_local_op_remote_workflow():
)
def test_multi_process_transparent_api_create_on_local_remote_workflow():
files = examples.download_distributed_files()
wf = core.Workflow()
# Make sure to reuse the same type of remote server as for the previous ones:
# Cannot merge a Workflow from a non-legacy grpc server to a workflow on a legacy grpc server
merge_server = local_servers[len(files)]
wf = core.Workflow(server=merge_server)
wf.progress_bar = False
op = ops.result.displacement()
average = core.operators.math.norm_fc(op)
op = ops.result.displacement(server=merge_server)
average = core.operators.math.norm_fc(op, server=merge_server)

wf.add_operators([op, average])
wf.set_output_name("distrib", average.outputs.fields_container)
wf.set_input_name("ds", op.inputs.data_sources)

local_wf = core.Workflow()
local_wf = core.Workflow(server=merge_server)
local_wf.progress_bar = False
merge = ops.utility.merge_fields_containers()
min_max = ops.min_max.min_max_fc(merge)
merge = ops.utility.merge_fields_containers(server=merge_server)
min_max = ops.min_max.min_max_fc(merge, server=merge_server)
local_wf.add_operator(merge)
local_wf.add_operator(min_max)
local_wf.set_output_name("tot_output", min_max.outputs.field_max)

for i in files:
data_sources1 = core.DataSources(files[i])
data_sources1 = core.DataSources(files[i], server=merge_server)
remote_wf = wf.create_on_other_server(server=local_servers[i])
remote_wf.connect("ds", data_sources1)
local_wf.set_input_name("distrib" + str(i), merge, i)
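The pattern introduced in these tests — build the merging workflow and its operators on an explicit server of the same type as the remote workflows it consumes — looks like this in isolation (a sketch; the server start is illustrative, the tests reuse servers from conftest's local_servers fixture):

```python
# Sketch of the pattern above: pin the merging workflow and its operators to
# one explicit server instead of the global default server.
from ansys.dpf import core
from ansys.dpf.core import operators as ops

merge_server = core.start_local_server(as_global=False)  # illustrative server

local_wf = core.Workflow(server=merge_server)
local_wf.progress_bar = False
merge = ops.utility.merge_fields_containers(server=merge_server)
min_max = ops.min_max.min_max_fc(merge, server=merge_server)
local_wf.add_operator(merge)
local_wf.add_operator(min_max)
local_wf.set_output_name("tot_output", min_max.outputs.field_max)
```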
14 changes: 6 additions & 8 deletions tests/test_python_plugins.py
@@ -20,7 +20,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import os
from pathlib import Path
import platform

@@ -37,6 +36,7 @@
PinSpecification,
SpecificationProperties,
)
from ansys.dpf.core.server_factory import CommunicationProtocols
import conftest
from conftest import (
SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0,
@@ -46,16 +46,14 @@

if not SERVERS_VERSION_GREATER_THAN_OR_EQUAL_TO_4_0:
pytest.skip("Requires server version higher than 4.0", allow_module_level=True)
# if platform.python_version().startswith("3.7"):
# pytest.skip(
# "Known failures in the GitHub pipelines for 3.7",
# allow_module_level=True
# )

if platform.system() == "Linux":
pytest.skip("Known failures for the Ubuntu-latest GitHub pipelines", allow_module_level=True)

update_virtual_environment_for_custom_operators(restore_original=True)
update_virtual_environment_for_custom_operators()
# Updating the dpf-site.zip of a DPF Server is only available when InProcess.
if dpf.SERVER.config.protocol == CommunicationProtocols.InProcess:
update_virtual_environment_for_custom_operators(restore_original=True)
update_virtual_environment_for_custom_operators()


@pytest.fixture(scope="module")
2 changes: 1 addition & 1 deletion tests/test_resultinfo.py
@@ -315,7 +315,7 @@ def test_create_result_info(server_type):
"""
assert str(result_info) == ref
with pytest.raises(ValueError, match="requires"):
_ = dpf.core.ResultInfo()
_ = dpf.core.ResultInfo(server=server_type)
else:
with pytest.raises(NotImplementedError, match="Cannot create a new ResultInfo via gRPC."):
_ = dpf.core.ResultInfo(
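The updated assertion passes the parametrized server explicitly; in isolation it checks that constructing a ResultInfo without its required arguments raises a ValueError (a sketch, assuming a server configuration that supports ResultInfo creation, such as InProcess):

```python
# Sketch of what the updated test asserts; the server start is illustrative.
import pytest
from ansys.dpf import core

server = core.start_local_server(
    config=core.AvailableServerConfigs.InProcessServer, as_global=False
)
with pytest.raises(ValueError, match="requires"):
    core.ResultInfo(server=server)  # missing required arguments -> ValueError
```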