From 6770ca32e260e0d81183f3e93f1c149985a5aeef Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Tue, 8 Mar 2022 11:21:20 +0100 Subject: [PATCH 1/6] Issue#186: enable os.PathLike objects as arguments in pydpf-core - modifications and updated docstrings, no tests --- ansys/dpf/core/core.py | 70 +++++++++---------- ansys/dpf/core/data_sources.py | 18 ++--- ansys/dpf/core/inputs.py | 10 ++- ansys/dpf/core/operators/mesh/stl_export.py | 4 +- .../core/operators/result/pres_to_field.py | 2 +- .../core/operators/result/prns_to_field.py | 2 +- ansys/dpf/core/operators/result/run.py | 4 +- .../operators/serialization/deserializer.py | 2 +- .../serialization/serialize_to_hdf5.py | 2 +- .../operators/serialization/vtk_export.py | 2 +- ansys/dpf/core/operators/utility/html_doc.py | 2 +- .../operators/utility/python_generator.py | 8 +-- ansys/dpf/core/path_utilities.py | 7 +- ansys/dpf/core/server.py | 22 +++--- 14 files changed, 83 insertions(+), 72 deletions(-) diff --git a/ansys/dpf/core/core.py b/ansys/dpf/core/core.py index d4ef8677dbf..a117f0ec9c2 100644 --- a/ansys/dpf/core/core.py +++ b/ansys/dpf/core/core.py @@ -34,7 +34,7 @@ def load_library(filename, name="", symbol="LoadOperators", server=None): Parameters ---------- - filename : str + filename : str or os.PathLike Filename of the operator library. name : str, optional @@ -64,7 +64,7 @@ def upload_file_in_tmp_folder(file_path, new_file_name=None, server=None): Parameters ---------- - file_path : str + file_path : str or os.PathLike file path on the client side to upload new_file_name : str, optional @@ -103,10 +103,10 @@ def upload_files_in_folder( Parameters ---------- - to_server_folder_path : str + to_server_folder_path : str or os.PathLike folder path target where will be uploaded files on the server side - client_folder_path: str + client_folder_path: str or os.PathLike folder path where the files that must be uploaded are located on client side @@ -133,10 +133,10 @@ def download_file(server_file_path, to_client_file_path, server=None): Parameters ---------- - server_file_path : str + server_file_path : str or os.PathLike file path to download on the server side - to_client_file_path: str + to_client_file_path: str or os.PathLike file path target where the file will be located client side server : server.DPFServer, optional @@ -168,10 +168,10 @@ def download_files_in_folder( Parameters ---------- - server_folder_path : str + server_folder_path : str or os.PathLike folder path to download on the server side - to_client_folder_path: str + to_client_folder_path: str or os.PathLike folder path target where the files will be located client side specific_extension (optional) : str @@ -202,10 +202,10 @@ def upload_file(file_path, to_server_file_path, server=None): Parameters ---------- - file_path : str + file_path : str or os.PathLike file path on the client side to upload - to_server_file_path: str + to_server_file_path: str or os.PathLike file path target where the file will be located server side server : server.DPFServer, optional @@ -340,15 +340,15 @@ def make_tmp_dir_server(self): request = base_pb2.Empty() return self._stub.CreateTmpDir(request).server_file_path - def load_library(self, filename, name="", symbol="LoadOperators"): + def load_library(self, file_path, name="", symbol="LoadOperators"): """Dynamically load an operators library for dpf.core. Code containing this library's operators is generated in ansys.dpf.core.operators Parameters ---------- - filename : str - Filename of the operator library. 
+ file_path : str or os.PathLike + file_path of the operator library. name : str, optional Library name. Probably optional @@ -365,13 +365,13 @@ def load_library(self, filename, name="", symbol="LoadOperators"): """ request = base_pb2.PluginRequest() request.name = name - request.dllPath = filename + request.dllPath = str(file_path) request.symbol = symbol try: self._stub.Load(request) except Exception as e: raise IOError( - f'Unable to load library "{filename}". File may not exist or' + f'Unable to load library "{str(file_path)}". File may not exist or' f" is missing dependencies:\n{str(e)}" ) @@ -391,7 +391,7 @@ def load_library(self, filename, name="", symbol="LoadOperators"): code_gen = Operator("python_generator") code_gen.connect(1, TARGET_PATH) - code_gen.connect(0, filename) + code_gen.connect(0, str(file_path)) code_gen.connect(2, False) code_gen.run() @@ -473,10 +473,10 @@ def download_file(self, server_file_path, to_client_file_path): Parameters ---------- - server_file_path : str + server_file_path : str or os.PathLike file path to download on the server side - to_client_file_path: str + to_client_file_path: str or os.PathLike file path target where the file will be located client side Notes @@ -484,7 +484,7 @@ def download_file(self, server_file_path, to_client_file_path): Print a progress bar """ request = base_pb2.DownloadFileRequest() - request.server_file_path = server_file_path + request.server_file_path = str(server_file_path) chunks = self._stub.DownloadFile(request) bar = None tot_size = sys.float_info.max @@ -517,10 +517,10 @@ def download_files_in_folder( Parameters ---------- - server_folder_path : str + server_folder_path : str or os.PathLike folder path to download on the server side - to_client_folder_path: str + to_client_folder_path: str or os.PathLike folder path target where the files will be located client side specific_extension (optional) : str @@ -537,7 +537,7 @@ def download_files_in_folder( """ request = base_pb2.DownloadFileRequest() - request.server_file_path = server_folder_path + request.server_file_path = str(server_folder_path) chunks = self._stub.DownloadFile(request) num_files = 1 @@ -562,13 +562,13 @@ def download_files_in_folder( ): separator = self._get_separator(server_path) server_subpath = server_path.replace( - server_folder_path + separator, "" + str(server_folder_path) + separator, "" ) subdir = "" split = server_subpath.split(separator) n = len(split) i = 0 - to_client_folder_path_copy = to_client_folder_path + to_client_folder_path_copy = str(to_client_folder_path) if n > 1: while i < (n - 1): subdir = split[i] @@ -607,10 +607,10 @@ def upload_files_in_folder( Parameters ---------- - to_server_folder_path : str + to_server_folder_path : str or os.PathLike folder path target where will be uploaded files on the server side - client_folder_path: str + client_folder_path: str or os.PathLike folder path where the files that must be uploaded are located on client side @@ -633,13 +633,13 @@ def upload_files_in_folder( f, filename, server_paths, - to_server_folder_path, + str(to_server_folder_path), subdirectory, ) for file in files: f = os.path.join(root, file) server_paths = self._upload_and_get_server_path( - specific_extension, f, file, server_paths, to_server_folder_path + specific_extension, f, file, server_paths, str(to_server_folder_path) ) break return server_paths @@ -678,10 +678,10 @@ def upload_file(self, file_path, to_server_file_path): Parameters ---------- - file_path : str + file_path : str or os.PathLike file path on the 
client side to upload - to_server_file_path: str + to_server_file_path: str or os.PathLike file path target where the file will be located server side Returns @@ -694,9 +694,9 @@ def upload_file(self, file_path, to_server_file_path): Print a progress bar """ if os.stat(file_path).st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(str(file_path) + " is empty") return self._stub.UploadFile( - self.__file_chunk_yielder(file_path, to_server_file_path) + self.__file_chunk_yielder(str(file_path), str(to_server_file_path)) ).server_file_path @protect_grpc @@ -706,7 +706,7 @@ def upload_file_in_tmp_folder(self, file_path, new_file_name=None): Parameters ---------- - file_path : str + file_path : str or os.PathLike file path on the client side to upload new_file_name : str, optional @@ -727,10 +727,10 @@ def upload_file_in_tmp_folder(self, file_path, new_file_name=None): else: file_name = os.path.basename(file_path) if os.stat(file_path).st_size == 0: - raise ValueError(file_path + " is empty") + raise ValueError(str(file_path) + " is empty") return self._stub.UploadFile( self.__file_chunk_yielder( - file_path=file_path, to_server_file_path=file_name, use_tmp_dir=True + file_path=str(file_path), to_server_file_path=file_name, use_tmp_dir=True ) ).server_file_path diff --git a/ansys/dpf/core/data_sources.py b/ansys/dpf/core/data_sources.py index 85c39bf5162..845ecafa614 100644 --- a/ansys/dpf/core/data_sources.py +++ b/ansys/dpf/core/data_sources.py @@ -21,7 +21,7 @@ class DataSources: Parameters ---------- - result_path : str, optional + result_path : str or os.PathLike object, optional Path of the result. The default is ``None``. data_sources : ansys.grpc.dpf.data_sources_pb2.DataSources gRPC data sources message. The default is ``None``. @@ -68,7 +68,7 @@ def set_result_file_path(self, filepath, key=""): Parameters ---------- - filepath : str + filepath : str or os.PathLike object Path to the result file. key : str, optional Extension of the file, which is used as a key for choosing the correct @@ -89,7 +89,7 @@ def set_result_file_path(self, filepath, key=""): request = data_sources_pb2.UpdateRequest() request.result_path = True request.key = key - request.path = filepath + request.path = str(filepath) request.data_sources.CopyFrom(self._message) self._stub.Update(request) @@ -101,7 +101,7 @@ def set_domain_result_file_path(self, path, domain_id): Parameters ---------- - path: str + path: str or os.PathLike object Path to the file. domain_id: int, optional Domain ID for the distributed files. @@ -118,7 +118,7 @@ def set_domain_result_file_path(self, path, domain_id): request.result_path = True request.domain.domain_path = True request.domain.domain_id = domain_id - request.path = path + request.path = str(path) request.data_sources.CopyFrom(self._message) self._stub.Update(request) @@ -130,7 +130,7 @@ def add_file_path(self, filepath, key="", is_domain: bool = False, domain_id=0): Parameters ---------- - filepath : str + filepath : str or os.PathLike object Path of the file. 
key : str, optional Extension of the file, which is used as a key for choosing the correct @@ -155,7 +155,7 @@ def add_file_path(self, filepath, key="", is_domain: bool = False, domain_id=0): request = data_sources_pb2.UpdateRequest() request.key = key - request.path = filepath + request.path = str(filepath) if is_domain: request.domain.domain_path = True request.domain.domain_id = domain_id @@ -171,7 +171,7 @@ def add_file_path_for_specified_result(self, filepath, key="", result_key=""): Parameters ---------- - filepath : str + filepath : str or os.PathLike object Path of the file. key : str, optional Extension of the file, which is used as a key for choosing the correct @@ -189,7 +189,7 @@ def add_file_path_for_specified_result(self, filepath, key="", result_key=""): request = data_sources_pb2.UpdateRequest() request.key = key request.result_key = result_key - request.path = filepath + request.path = str(filepath) request.data_sources.CopyFrom(self._message) self._stub.Update(request) diff --git a/ansys/dpf/core/inputs.py b/ansys/dpf/core/inputs.py index cfcb815e7bc..fcd32e19332 100644 --- a/ansys/dpf/core/inputs.py +++ b/ansys/dpf/core/inputs.py @@ -42,9 +42,10 @@ def connect(self, inpt): Parameters ---------- inpt : str, int, double, Field, FieldsContainer, Scoping, DataSources, - MeshedRegion, Output, Outputs, Operator + MeshedRegion, Output, Outputs, Operator, os.PathLike Input of the operator. """ + from pathlib import Path # always convert ranges to lists if isinstance(inpt, range): inpt = list(inpt) @@ -58,6 +59,8 @@ def connect(self, inpt): ) elif isinstance(inpt, core.Model): inpt = inpt.metadata.data_sources + elif isinstance(inpt, Path): + inpt = str(inpt) input_type_name = type(inpt).__name__ if not ( @@ -182,9 +185,10 @@ def connect(self, inpt): ---------- inpt : str, int, double, Field, FieldsContainer, Scoping, DataSources, MeshedRegion, ScopingsContainer, CyclicSupport, - ..., Output, Outputs, Operator + ..., Output, Outputs, Operator, os.PathLike Input of the operator. 
""" + from pathlib import Path corresponding_pins = [] if isinstance(inpt, core.Operator): if hasattr(inpt, "outputs"): @@ -196,6 +200,8 @@ def connect(self, inpt): ) elif isinstance(inpt, core.Model): inpt = inpt.metadata.data_sources + elif isinstance(inpt, Path): + inpt = str(inpt) input_type_name = type(inpt).__name__ for input_pin in self._inputs: diff --git a/ansys/dpf/core/operators/mesh/stl_export.py b/ansys/dpf/core/operators/mesh/stl_export.py index 96616ae335f..408eac4fde3 100644 --- a/ansys/dpf/core/operators/mesh/stl_export.py +++ b/ansys/dpf/core/operators/mesh/stl_export.py @@ -16,7 +16,7 @@ class stl_export(Operator): Parameters ---------- mesh : MeshedRegion - file_path : str + file_path : str or os.PathLike Examples @@ -49,7 +49,7 @@ def __init__(self, mesh=None, file_path=None, config=None, server=None): if mesh is not None: self.inputs.mesh.connect(mesh) if file_path is not None: - self.inputs.file_path.connect(file_path) + self.inputs.file_path.connect(str(file_path)) @staticmethod def _spec(): diff --git a/ansys/dpf/core/operators/result/pres_to_field.py b/ansys/dpf/core/operators/result/pres_to_field.py index ddb7c869d93..5c91cdd8075 100644 --- a/ansys/dpf/core/operators/result/pres_to_field.py +++ b/ansys/dpf/core/operators/result/pres_to_field.py @@ -15,7 +15,7 @@ class pres_to_field(Operator): Parameters ---------- - filepath : str + filepath : str or os.PathLike Filepath diff --git a/ansys/dpf/core/operators/result/prns_to_field.py b/ansys/dpf/core/operators/result/prns_to_field.py index 8e81934cf50..86e0da63b27 100644 --- a/ansys/dpf/core/operators/result/prns_to_field.py +++ b/ansys/dpf/core/operators/result/prns_to_field.py @@ -15,7 +15,7 @@ class prns_to_field(Operator): Parameters ---------- - filepath : str + filepath : str or os.PathLike Filepath diff --git a/ansys/dpf/core/operators/result/run.py b/ansys/dpf/core/operators/result/run.py index 73cc0f6c306..2123b138212 100644 --- a/ansys/dpf/core/operators/result/run.py +++ b/ansys/dpf/core/operators/result/run.py @@ -16,7 +16,7 @@ class run(Operator): Parameters ---------- - mapdl_exe_path : str, optional + mapdl_exe_path : str or os.PathLike, optional working_dir : str, optional number_of_processes : int, optional Set the number of mpi processes used for @@ -67,7 +67,7 @@ def __init__( self._inputs = InputsRun(self) self._outputs = OutputsRun(self) if mapdl_exe_path is not None: - self.inputs.mapdl_exe_path.connect(mapdl_exe_path) + self.inputs.mapdl_exe_path.connect(str(mapdl_exe_path)) if working_dir is not None: self.inputs.working_dir.connect(working_dir) if number_of_processes is not None: diff --git a/ansys/dpf/core/operators/serialization/deserializer.py b/ansys/dpf/core/operators/serialization/deserializer.py index fcf9e9a8132..b19661704a7 100644 --- a/ansys/dpf/core/operators/serialization/deserializer.py +++ b/ansys/dpf/core/operators/serialization/deserializer.py @@ -16,7 +16,7 @@ class deserializer(Operator): Parameters ---------- - file_path : str + file_path : str or os.PathLike File path diff --git a/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py b/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py index 030df721edf..2de923e2713 100644 --- a/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py +++ b/ansys/dpf/core/operators/serialization/serialize_to_hdf5.py @@ -15,7 +15,7 @@ class serialize_to_hdf5(Operator): Parameters ---------- - file_path : str + file_path : str or os.PathLike Output file path with .h5 extension export_floats : bool Converts double to float 
to reduce file size diff --git a/ansys/dpf/core/operators/serialization/vtk_export.py b/ansys/dpf/core/operators/serialization/vtk_export.py index c93f453c6e4..19ad619bb41 100644 --- a/ansys/dpf/core/operators/serialization/vtk_export.py +++ b/ansys/dpf/core/operators/serialization/vtk_export.py @@ -15,7 +15,7 @@ class vtk_export(Operator): Parameters ---------- - file_path : str + file_path : str or os.PathLike Path with vtk extension were the export occurs mesh : MeshedRegion, optional diff --git a/ansys/dpf/core/operators/utility/html_doc.py b/ansys/dpf/core/operators/utility/html_doc.py index 84679c73bb0..575b777d0b8 100644 --- a/ansys/dpf/core/operators/utility/html_doc.py +++ b/ansys/dpf/core/operators/utility/html_doc.py @@ -16,7 +16,7 @@ class html_doc(Operator): Parameters ---------- - output_path : str, optional + output_path : str or os.PathLike, optional Default is {working directory}/dataprocessingdoc.html diff --git a/ansys/dpf/core/operators/utility/python_generator.py b/ansys/dpf/core/operators/utility/python_generator.py index 8cb6b1c518c..4bbe569aae8 100644 --- a/ansys/dpf/core/operators/utility/python_generator.py +++ b/ansys/dpf/core/operators/utility/python_generator.py @@ -15,8 +15,8 @@ class python_generator(Operator): Parameters ---------- - dll_source_path : str - output_path : str + dll_source_path : str or os.PathLike + output_path : str or os.PathLike Examples @@ -138,7 +138,7 @@ def dll_source_path(self): Parameters ---------- - my_dll_source_path : str + my_dll_source_path : str or os.PathLike Examples -------- @@ -156,7 +156,7 @@ def output_path(self): Parameters ---------- - my_output_path : str + my_output_path : str or os.PathLike Examples -------- diff --git a/ansys/dpf/core/path_utilities.py b/ansys/dpf/core/path_utilities.py index 8b489d7bc25..f0f4fe056f9 100644 --- a/ansys/dpf/core/path_utilities.py +++ b/ansys/dpf/core/path_utilities.py @@ -8,6 +8,7 @@ import os from ansys.dpf.core import server as server_module +from pathlib import Path def join(*args, **kwargs): @@ -19,7 +20,7 @@ def join(*args, **kwargs): Parameters ---------- - args : str, DPFServer + args : str, os.PathLike, DPFServer Path to join and optionally a server. kwargs : DPFServer @@ -39,6 +40,9 @@ def join(*args, **kwargs): for a in args: if isinstance(a, str) and len(a) > 0: parts.append(a) + elif isinstance(a, Path): + if len(str(a)) > 0: + parts.append(str(a)) elif isinstance(a, server_module.DpfServer): server = a if "server" in kwargs: @@ -64,6 +68,7 @@ def join(*args, **kwargs): return path_to_return def to_server_os(path, server=None): + path = str(path) if not server: server = server_module._global_server() if not server: diff --git a/ansys/dpf/core/server.py b/ansys/dpf/core/server.py index 6637b963ec4..1eb1ba79822 100644 --- a/ansys/dpf/core/server.py +++ b/ansys/dpf/core/server.py @@ -155,7 +155,7 @@ def start_local_server( port : int Port to connect to the remote instance on. The default is ``"DPF_DEFAULT_PORT"``, which is 50054. - ansys_path : str, optional + ansys_path : str or os.PathLike, optional Root path for the Ansys installation directory. For example, ``"/ansys_inc/v212/"``. The default is the latest Ansys installation. 
as_global : bool, optional @@ -192,7 +192,7 @@ def start_local_server( # parse the version to an int and check for supported try: - ver = int(ansys_path[-3:]) + ver = int(str(ansys_path)[-3:]) if ver < 211: raise errors.InvalidANSYSVersionError(f"Ansys v{ver} does not support DPF") if ver == 211 and is_ubuntu(): @@ -234,7 +234,7 @@ def start_local_server( if server is None: raise OSError( f"Unable to launch the server after {n_attempts} attempts. " - "Check the following path:\n{ansys_path}\n\n" + "Check the following path:\n{str(ansys_path)}\n\n" "or attempt to use a different port" ) @@ -294,7 +294,7 @@ class DpfServer: Parameters ----------- - server_bin : str + server_bin : str or os.PathLike Path for the DPF executable. ip : str IP address of the remote or local instance to connect to. The @@ -340,7 +340,7 @@ def __init__( if os.name == "posix" and "ubuntu" in platform.platform().lower(): raise OSError("DPF does not support Ubuntu") elif launch_server: - self._server_id = launch_dpf(ansys_path, ip, port, docker_name=docker_name) + self._server_id = launch_dpf(str(ansys_path), ip, port, docker_name=docker_name) self.channel = grpc.insecure_channel("%s:%d" % (ip, port)) @@ -354,7 +354,7 @@ def __init__( self._input_ip = ip self._input_port = port self.live = True - self.ansys_path = ansys_path + self.ansys_path = str(ansys_path) self._own_process = launch_server self._base_service_instance = None self._session_instance = None @@ -553,13 +553,13 @@ def _run_launch_server_process(ansys_path, ip, port, docker_name): path_in_install = "aisol/bin/linx64" # verify ansys path is valid - if os.path.isdir(f"{ansys_path}/{path_in_install}"): - dpf_run_dir = f"{ansys_path}/{path_in_install}" + if os.path.isdir(f"{str(ansys_path)}/{path_in_install}"): + dpf_run_dir = f"{str(ansys_path)}/{path_in_install}" else: - dpf_run_dir = f"{ansys_path}" + dpf_run_dir = f"{str(ansys_path)}" if not os.path.isdir(dpf_run_dir): raise NotADirectoryError( - f'Invalid ansys path at "{ansys_path}". ' + f'Invalid ansys path at "{str(ansys_path)}". ' "Unable to locate the directory containing DPF at " f'"{dpf_run_dir}"' ) @@ -575,7 +575,7 @@ def launch_dpf(ansys_path, ip=LOCALHOST, port=DPF_DEFAULT_PORT, timeout=10, dock Parameters ---------- - ansys_path : str, optional + ansys_path : str or os.PathLike, optional Root path for the Ansys installation directory. For example, ``"/ansys_inc/v212/"``. The default is the latest Ansys installation. ip : str, optional From 668aebc1d0a8cdcadcb65786740ff039f2831131 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Tue, 8 Mar 2022 15:57:44 +0100 Subject: [PATCH 2/6] Issue#186: enable os.PathLike objects as arguments in pydpf-core - modified Operator.connect() and updated docstring, no tests. Removed prior direct changes to operators --- ansys/dpf/core/dpf_operator.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ansys/dpf/core/dpf_operator.py b/ansys/dpf/core/dpf_operator.py index c1f25edcc02..dfea13f3aca 100644 --- a/ansys/dpf/core/dpf_operator.py +++ b/ansys/dpf/core/dpf_operator.py @@ -137,7 +137,7 @@ def connect(self, pin, inpt, pin_out=0): Number of the input pin. inpt : str, int, double, bool, list of int, list of doubles, Field, FieldsContainer, Scoping, ScopingsContainer, MeshedRegion, - MeshesContainer, DataSources, Operator + MeshesContainer, DataSources, Operator, os.PathLike Object to connect to. pin_out : int, optional If the input is an operator, the output pin of the input operator. 
The @@ -729,9 +729,11 @@ def _fillConnectionRequestMessage(request, inpt, server, pin_out=0): workflow, time_freq_support, ) - + from pathlib import Path if isinstance(inpt, str): request.str = inpt + elif isinstance(inpt, Path): + request.str = str(inpt) elif isinstance(inpt, bool): request.bool = inpt elif isinstance(inpt, int): From 7523bcdb6d3bf501cf8c71041d7f38e4417982a1 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 9 Mar 2022 09:34:59 +0100 Subject: [PATCH 3/6] Added path support to Model() via Metadata._set_data_sources which tests types. Several tests in test_pathsupport.py covering most (all?) points of entry for paths. Modified test_elements.py test_descriptor_with_int_value which was getting allkindofcomplexity for no apparent reason. --- ansys/dpf/core/model.py | 3 ++- tests/test_elements.py | 2 +- tests/test_pathsupport.py | 49 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 tests/test_pathsupport.py diff --git a/ansys/dpf/core/model.py b/ansys/dpf/core/model.py index 372a3e546ff..efe96b3cd46 100644 --- a/ansys/dpf/core/model.py +++ b/ansys/dpf/core/model.py @@ -384,9 +384,10 @@ def streams_provider(self): return self._stream_provider def _set_data_sources(self, var_inp): + from pathlib import Path if isinstance(var_inp, dpf.core.DataSources): self._data_sources = var_inp - elif isinstance(var_inp, str): + elif isinstance(var_inp, (str, Path)): self._data_sources = DataSources(var_inp, server=self._server) else: self._data_sources = DataSources(server=self._server) diff --git a/tests/test_elements.py b/tests/test_elements.py index a6a89467e18..d3d095dea12 100644 --- a/tests/test_elements.py +++ b/tests/test_elements.py @@ -144,7 +144,7 @@ def test_no_element_descriptor(): ) -def test_descriptor_with_int_value(allkindofcomplexity): +def test_descriptor_with_int_value(): # int as attribute instead of element_types.VALUE descriptor = dpf.element_types.descriptor(1) check_element_attributes( diff --git a/tests/test_pathsupport.py b/tests/test_pathsupport.py new file mode 100644 index 00000000000..dcfe997a494 --- /dev/null +++ b/tests/test_pathsupport.py @@ -0,0 +1,49 @@ +# Tests specific to pathlib.Path support as path argument instead of str +import pytest +import functools +import os + +from ansys import dpf +from pathlib import Path +from ansys.dpf.core import path_utilities + +skip_always = pytest.mark.skipif(True, reason="Investigate why this is failing") + + +def test_create_with_resultpath_data_sources_path(allkindofcomplexity): + path = Path(allkindofcomplexity) + data_sources = dpf.core.DataSources(path) + assert data_sources._message.id != 0 + + +def test_addpath_data_sources_path(allkindofcomplexity): + path = Path(allkindofcomplexity) + data_sources = dpf.core.DataSources() + data_sources.add_file_path(path) + print(data_sources) + + +def test_print_data_sources_path(allkindofcomplexity): + path = Path(allkindofcomplexity) + data_sources = dpf.core.DataSources() + data_sources.set_result_file_path(path) + print(data_sources) + assert data_sources.result_key == "rst" + assert data_sources.result_files == [allkindofcomplexity] + + +def test_all_result_operators_exist_path(allkindofcomplexity): + path = Path(allkindofcomplexity) + model = dpf.core.Model(path) + res = model.results + for key in res.__dict__: + if isinstance(res.__dict__[key], functools.partial): + res.__dict__[key]() + + +def test_operator_connect_path(allkindofcomplexity): + path = Path(allkindofcomplexity) + op = 
dpf.core.operators.serialization.field_to_csv() + op.connect(0, path) + op.inputs.connect(path) + op.inputs.file_path.connect(path) From 625a9d81f595cd37dd26860c6bf24e7d86bf2400 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 9 Mar 2022 09:37:25 +0100 Subject: [PATCH 4/6] Corrected imports to pass flake8 --- tests/test_pathsupport.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_pathsupport.py b/tests/test_pathsupport.py index dcfe997a494..556626f3efb 100644 --- a/tests/test_pathsupport.py +++ b/tests/test_pathsupport.py @@ -1,11 +1,9 @@ # Tests specific to pathlib.Path support as path argument instead of str import pytest import functools -import os from ansys import dpf from pathlib import Path -from ansys.dpf.core import path_utilities skip_always = pytest.mark.skipif(True, reason="Investigate why this is failing") From 8ec24f81a01dbe0c28a2be1bc8bf0bfdf2b9ce71 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 9 Mar 2022 09:43:40 +0100 Subject: [PATCH 5/6] Amended the Model() docstring as accepting os.PathLike objects --- ansys/dpf/core/model.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ansys/dpf/core/model.py b/ansys/dpf/core/model.py index efe96b3cd46..717c94cb77e 100644 --- a/ansys/dpf/core/model.py +++ b/ansys/dpf/core/model.py @@ -24,9 +24,9 @@ class Model: Parameters ---------- - data_sources : str, dpf.core.DataSources - Accepts either a :class:`dpf.core.DataSources` instance or the name of the - result file to open. The default is ``None``. + data_sources : str, dpf.core.DataSources, os.PathLike + Accepts either a :class:`dpf.core.DataSources` instance or the path of the + result file to open as an os.PathLike object or a str. The default is ``None``. server : server.DPFServer, optional Server with the channel connected to the remote or local instance. The default is ``None``, in which case an attempt is made to use the global From 449a33b94b861c81ae507f103c7523f0b5f48073 Mon Sep 17 00:00:00 2001 From: "paul.profizi" Date: Wed, 9 Mar 2022 14:12:11 +0100 Subject: [PATCH 6/6] Refactored path_utilities.py according to reviews --- ansys/dpf/core/path_utilities.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/ansys/dpf/core/path_utilities.py b/ansys/dpf/core/path_utilities.py index f0f4fe056f9..79a444d71bc 100644 --- a/ansys/dpf/core/path_utilities.py +++ b/ansys/dpf/core/path_utilities.py @@ -38,11 +38,8 @@ def join(*args, **kwargs): server = None parts = [] for a in args: - if isinstance(a, str) and len(a) > 0: - parts.append(a) - elif isinstance(a, Path): - if len(str(a)) > 0: - parts.append(str(a)) + if isinstance(a, (str, Path)) and Path(a) != Path(""): + parts.append(str(a)) elif isinstance(a, server_module.DpfServer): server = a if "server" in kwargs:
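
Taken together, these patches let any os.PathLike object stand in for a str path at the public entry points (DataSources, Model, operator inputs, and the file-transfer and server utilities). The sketch below is an illustrative usage example only, not part of the patch series: the result-file path is a hypothetical placeholder, and the calls mirror the cases exercised in tests/test_pathsupport.py.

    # Usage sketch: pathlib.Path accepted wherever a str path was previously required.
    # Assumes a running local DPF server and a result file at the hypothetical path below.
    from pathlib import Path

    from ansys import dpf

    result_path = Path("examples") / "model_with_ns.rst"  # hypothetical result file

    # DataSources accepts a Path at construction or via add_file_path
    data_sources = dpf.core.DataSources(result_path)
    data_sources.add_file_path(result_path)

    # Model forwards the Path to DataSources internally
    model = dpf.core.Model(result_path)

    # Operator inputs convert Path objects to str before connecting
    op = dpf.core.operators.serialization.field_to_csv()
    op.inputs.file_path.connect(result_path)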