From 57ed55ae30af075dece33e6ee485f932f4f6d3bf Mon Sep 17 00:00:00 2001
From: Raja Pise
Date: Fri, 2 Aug 2024 12:43:54 +0530
Subject: [PATCH 01/60] Integrate with data transfer worker

---
 src/ansys/hps/client/client.py              | 41 ++++++++-
 src/ansys/hps/client/jms/api/project_api.py | 98 ++++++++++++++-------
 2 files changed, 104 insertions(+), 35 deletions(-)

diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py
index 905275bb3..95de6d5e4 100644
--- a/src/ansys/hps/client/client.py
+++ b/src/ansys/hps/client/client.py
@@ -21,10 +21,13 @@
 # SOFTWARE.
 """Module providing the Python client to the HPS APIs."""
 
+import atexit
 import logging
 from typing import Union
 import warnings
 
+from ansys.hps.data_transfer.client import Client as dtClient
+from ansys.hps.data_transfer.client import DataTransferApi
 import jwt
 import requests
 
@@ -76,6 +79,9 @@ class Client(object):
     refresh_token : str, optional
         Refresh token.
     auth_url : str, optional
+    data_transfer_url: str
+        URL pointing to the data transfer API endpoint.
+        The default is ``'https://127.0.0.1:8443/hps/dt/api/v1'``.
     all_fields : bool, optional
         Whether to apply the ``fields="all"`` query parameter to all requests
         so that all available fields are returned for the requested resources. The
@@ -98,7 +104,8 @@ class Client(object):
     >>> cl = Client(
     ...     url="https://localhost:8443/hps",
     ...     username="repuser",
-    ...     password="repuser"
+    ...     password="repuser",
+            dts_url="https://localhost:8443/hps/dt/api/v1"
     ...     )
 
     Create a client object and connect to HPS with a refresh token.
 
@@ -106,7 +113,8 @@ class Client(object):
     >>> cl = Client(
     ...     url="https://localhost:8443/hps",
     ...     username="repuser",
-    ...     refresh_token="eyJhbGciOiJIUzI1NiIsInR5cC..."
+    ...     refresh_token="eyJhbGciOiJIUzI1NiIsInR5cC...",
+            dts_url="https://localhost:8443/hps/dt/api/v1"
     >>> )
 
     """
@@ -128,6 +136,7 @@ def __init__(
         all_fields=True,
         verify: Union[bool, str] = None,
         disable_security_warnings: bool = True,
+        data_transfer_url: str = "https://localhost:8443/hps/dt/api/v1",
         **kwargs,
     ):
 
@@ -150,6 +159,8 @@ def __init__(
         self.client_id = client_id
         self.client_secret = client_secret
         self.verify = verify
+        self.data_transfer_url = data_transfer_url
+        self.dt_client = None
 
         if self.verify is None:
             self.verify = False
@@ -225,6 +236,13 @@
         self._unauthorized_num_retry = 0
         self._unauthorized_max_retry = 1
 
+        def exit_handler():
+            log.info("Exiting gracefully.")
+            if self.dt_client is not None:
+                self.dt_client.stop()
+
+        atexit.register(exit_handler)
+
     @property
     def rep_url(self) -> str:
         msg = "The client 'rep_url' property is deprecated. Use 'url' instead."
@@ -232,6 +250,25 @@ def rep_url(self) -> str:
         log.warning(msg)
         return self.url
 
+    def _start_dt_worker(self):
+
+        if self.dt_client is None:
+            log.info("Starting Data Transfer client.")
+            # start Data transfer client
+            self.dt_client = dtClient()
+
+            self.dt_client.binary_config.update(
+                verbosity=3,
+                debug=False,
+                insecure=True,
+                token=self.access_token,
+                data_transfer_url=self.data_transfer_url,
+            )
+            self.dt_client.start()
+
+            self.dt_api = DataTransferApi(self.dt_client)
+            self.dt_api.status(wait=True)
+
     def _auto_refresh_token(self, response, *args, **kwargs):
         """Automatically refreshes the access token and resends the request in
         case of an unauthorized error."""
diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py
index f74fd790f..21aa2e169 100644
--- a/src/ansys/hps/client/jms/api/project_api.py
+++ b/src/ansys/hps/client/jms/api/project_api.py
@@ -23,10 +23,12 @@
 import json
 import logging
 import os
-from pathlib import Path
+import shutil
 from typing import Callable, List, Type, Union
 from warnings import warn
 
+from ansys.hps.data_transfer.client.models.msg import SrcDst, StoragePath
+from ansys.hps.data_transfer.client.models.ops import OperationState
 import requests
 
 from ansys.hps.client.client import Client
@@ -657,17 +659,36 @@ def _delete_objects(self, objects: List[Object], obj_type: Type[Object]):
 
 
 def _download_files(project_api: ProjectApi, files: List[File]):
     """
-    Download files directly using the fs REST gateway.
+    Download files directly using data transfer worker.
 
-    This is a temporary implementation for downloading files. It is to be
-    replaced with direct ansft calls, when it is available as a Python package.
     """
+    project_api.client._start_dt_worker()
+    out_path = os.path.join(os.path.dirname(__file__), "downloads")
+
     for f in files:
         if getattr(f, "hash", None) is not None:
-            r = project_api.client.session.get(f"{project_api.fs_bucket_url}/{f.storage_id}")
-            f.content = r.content
-            f.content_type = r.headers["Content-Type"]
+            fpath = os.path.join(out_path, f"{f.id}")
+            download_path = os.path.join(fpath, f.evaluation_path)
+            base_dir = project_api.project_id
+
+            log.info(f"Downloading file {f.id}")
+            src = StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")
+            dst = StoragePath(path=download_path, remote="local")
+            op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)])
+            op = project_api.client.dt_api.wait_for([op.id])
+
+            log.info(f"Operation {op[0].state}")
+            if op[0].state == OperationState.Succeeded:
+                with open(download_path, "rb") as inp:
+                    f.content = inp.read()
+            else:
+                log.error(f"Download of file {f.evaluation_path} with id {f.id} failed")
+
+    # Delete temporary folder
+    if os.path.exists(out_path):
+        print("deleting folder")
+        shutil.rmtree(out_path)
 
 
 def get_files(project_api: ProjectApi, as_objects=True, content=False, **query_params):
@@ -682,32 +703,38 @@ def get_files(project_api: ProjectApi, as_objects=True, content=False, **query_p
 
 def _upload_files(project_api: ProjectApi, files):
     """
-    Uploads files directly using the fs REST gateway.
+    Uploads files directly using data transfer worker.
 
-    This is a temporary implementation for uploading files. It is to be
-    replaced with direct ansft calls, when it is available as a Python package.
     """
-    fs_headers = {"content-type": "application/octet-stream"}
+
+    project_api.client._start_dt_worker()
     for f in files:
         if getattr(f, "src", None) is None:
             continue
         is_file = isinstance(f.src, str) and os.path.exists(f.src)
 
-        content = f.src
-        if is_file:
-            content = open(f.src, "rb")
-
-        r = project_api.client.session.post(
-            f"{project_api.fs_bucket_url}/{f.storage_id}",
-            data=content,
-            headers=fs_headers,
-        )
-        f.hash = r.json()["checksum"]
-        f.size = r.request.headers.get("Content-Length", None)
         if is_file:
-            content.close()
+            base_dir = project_api.project_id
+            log.info(f"Copying files {f.id}")
+            src = StoragePath(path=f.src, remote="local")
+            dst = StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")
+            op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)])
+            op = project_api.client.dt_api.wait_for(op.id)
+            log.info(f"Operation {op[0].state}")
+            if op[0].state == OperationState.Succeeded:
+                op = project_api.client.dt_api.get_metadata([dst])
+                op = project_api.client.dt_api.wait_for(op.id)[0]
+                log.info(f"Operation {op.state}")
+                if op.state == OperationState.Succeeded:
+                    md = op.result[dst.path]
+                    f.hash = md["checksum"]
+                    f.size = md["size"]
+                else:
+                    log.error(f"Failed to fetch metadata of uploaded file {f.src}")
+            else:
+                log.error(f"Upload of file {f.src} failed")
 
 
 def create_files(project_api: ProjectApi, files, as_objects=True) -> List[File]:
@@ -755,21 +782,26 @@ def _download_file(
     stream: bool = True,
 ) -> str:
     """Download a file."""
+
+    project_api.client._start_dt_worker()
+
     if getattr(file, "hash", None) is None:
         log.warning(f"No hash found for file {file.name}.")
 
-    download_link = f"{project_api.fs_bucket_url}/{file.storage_id}"
     download_path = os.path.join(target_path, file.evaluation_path)
-    Path(download_path).parent.mkdir(parents=True, exist_ok=True)
+    base_dir = project_api.project_id
 
-    with (
-        project_api.client.session.get(download_link, stream=stream) as r,
-        open(download_path, "wb") as f,
-    ):
-        for chunk in r.iter_content(chunk_size=None):
-            f.write(chunk)
-            if progress_handler is not None:
-                progress_handler(len(chunk))
+    log.info(f"Downloading file {file.id}")
+    src = StoragePath(path=f"{base_dir}/{os.path.basename(file.storage_id)}")
+    dst = StoragePath(path=download_path, remote="local")
+    op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)])
+    op = project_api.client.dt_api.wait_for([op.id])
+
+    log.info(f"Operation {op[0].state}")
+
+    if op[0].state != OperationState.Succeeded:
+        log.error(f"Download of file {file.evaluation_path} with id {file.id} failed")
+        return None
 
     return download_path

From 4d0e1dfa4cd9c285b28fc66494346857349565e4 Mon Sep 17 00:00:00 2001
From: Raja Pise
Date: Fri, 2 Aug 2024 13:03:41 +0530
Subject: [PATCH 02/60] update dependency

---
 pyproject.toml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 517ccf6a1..ce509c80c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,7 +31,8 @@ dependencies = [
     "marshmallow_oneofschema>=2.0.1",
     "backoff>=2.0.0",
     "pydantic>=1.10.0",
-    "PyJWT>=2.8.0"
+    "PyJWT>=2.8.0",
+    "ansys-hps-data-transfer-client@git+https://github.com/ansys-internal/hps-data-transfer-client.git@main#egg=ansys-hps-data-transfer-client"
 ]
 
 [project.optional-dependencies]

From 198a877c1d1ba5d8bb01d73e29c374226cbae70c Mon Sep 17 00:00:00 2001
From: Raja Pise
Date: Fri, 2 Aug 2024 16:58:13 +0530
Subject: [PATCH 03/60] updated

---
 src/ansys/hps/client/jms/api/project_api.py | 99 +++++++++++++--------
 1 file changed, 61 insertions(+), 38
deletions(-) diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 21aa2e169..75dab525f 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -666,29 +666,36 @@ def _download_files(project_api: ProjectApi, files: List[File]): project_api.client._start_dt_worker() out_path = os.path.join(os.path.dirname(__file__), "downloads") + base_dir = project_api.project_id + srcs = [] + dsts = [] for f in files: if getattr(f, "hash", None) is not None: fpath = os.path.join(out_path, f"{f.id}") download_path = os.path.join(fpath, f.evaluation_path) - base_dir = project_api.project_id - - log.info(f"Downloading file {f.id}") - src = StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}") - dst = StoragePath(path=download_path, remote="local") - op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)]) - op = project_api.client.dt_api.wait_for([op.id]) + srcs.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) + dsts.append(StoragePath(path=download_path, remote="local")) - log.info(f"Operation {op[0].state}") - if op[0].state == OperationState.Succeeded: - with open(download_path, "rb") as inp: - f.content = inp.read() - else: - log.error(f"Download of file {f.evaluation_path} with id {f.id} failed") + if len(srcs) > 0: + log.info(f"Downloading files") + op = project_api.client.dt_api.copy( + [SrcDst(src=src, dst=dst) for src, dst in zip(srcs, dsts)] + ) + op = project_api.client.dt_api.wait_for([op.id]) + log.info(f"Operation {op[0].state}") + if op[0].state == OperationState.Succeeded: + for f in files: + if getattr(f, "hash", None) is not None: + fpath = os.path.join(out_path, f"{f.id}") + download_path = os.path.join(fpath, f.evaluation_path) + with open(download_path, "rb") as inp: + f.content = inp.read() + else: + log.error(f"Download of files failed") - # Delete temporary folder - if os.path.exists(out_path): - print("deleting folder") - shutil.rmtree(out_path) + # Delete temporary folder + if os.path.exists(out_path): + shutil.rmtree(out_path) def get_files(project_api: ProjectApi, as_objects=True, content=False, **query_params): @@ -708,33 +715,49 @@ def _upload_files(project_api: ProjectApi, files): """ project_api.client._start_dt_worker() + srcs = [] + dsts = [] + base_dir = project_api.project_id for f in files: if getattr(f, "src", None) is None: continue - is_file = isinstance(f.src, str) and os.path.exists(f.src) - if is_file: - base_dir = project_api.project_id - log.info(f"Copying files {f.id}") - src = StoragePath(path=f.src, remote="local") - dst = StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}") - op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)]) - op = project_api.client.dt_api.wait_for(op.id) - log.info(f"Operation {op[0].state}") - if op[0].state == OperationState.Succeeded: - op = project_api.client.dt_api.get_metadata([dst]) - op = project_api.client.dt_api.wait_for(op.id)[0] - log.info(f"Operation {op.state}") - if op.state == OperationState.Succeeded: - md = op.result[dst.path] - f.hash = md["checksum"] - f.size = md["size"] - else: - log.error(f"Failed to fetch metadata of uploaded file {f.src}") - else: - log.error(f"Upload of file {f.src} failed") + srcs.append(StoragePath(path=f.src, remote="local")) + dsts.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) + if len(srcs) > 0: + log.info(f"Uploading files") + op = project_api.client.dt_api.copy( + [SrcDst(src=src, dst=dst) 
for src, dst in zip(srcs, dsts)] + ) + op = project_api.client.dt_api.wait_for(op.id) + log.info(f"Operation {op[0].state}") + if op[0].state == OperationState.Succeeded: + _fetch_file_metadata(project_api, files, dsts) + else: + log.error(f"Upload of files failed") + else: + log.info("No files to upload") + + +def _fetch_file_metadata( + project_api: ProjectApi, files: List[File], storagePaths: List[StoragePath] +): + log.info(f"Getting upload file metadata") + op = project_api.client.dt_api.get_metadata(storagePaths) + op = project_api.client.dt_api.wait_for(op.id)[0] + log.info(f"Operation {op.state}") + if op.state == OperationState.Succeeded: + base_dir = project_api.project_id + for f in files: + if getattr(f, "src", None) is None: + continue + md = op.result[f"{base_dir}/{os.path.basename(f.storage_id)}"] + f.hash = md["checksum"] + f.size = md["size"] + else: + log.error(f"Failed to fetch metadata of uploaded files") def create_files(project_api: ProjectApi, files, as_objects=True) -> List[File]: From 6ac86205dee1ae9bbb257086d0cff4c9f489e7d4 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Thu, 8 Aug 2024 15:15:53 +0530 Subject: [PATCH 04/60] updated file --- src/ansys/hps/client/client.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 95de6d5e4..993ec0e29 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -26,7 +26,7 @@ from typing import Union import warnings -from ansys.hps.data_transfer.client import Client as dtClient +from ansys.hps.data_transfer.client import Client as DTClient from ansys.hps.data_transfer.client import DataTransferApi import jwt import requests @@ -136,7 +136,7 @@ def __init__( all_fields=True, verify: Union[bool, str] = None, disable_security_warnings: bool = True, - data_transfer_url: str = "https://localhost:8443/hps/dt/api/v1", + data_transfer_url: str | None = None, **kwargs, ): @@ -159,7 +159,7 @@ def __init__( self.client_id = client_id self.client_secret = client_secret self.verify = verify - self.data_transfer_url = data_transfer_url + self.data_transfer_url = data_transfer_url if data_transfer_url else url + "/dt/api/v1" self.dt_client = None if self.verify is None: @@ -255,7 +255,7 @@ def _start_dt_worker(self): if self.dt_client is None: log.info("Starting Data Transfer client.") # start Data transfer client - self.dt_client = dtClient() + self.dt_client = DTClient() self.dt_client.binary_config.update( verbosity=3, @@ -282,6 +282,8 @@ def _auto_refresh_token(self, response, *args, **kwargs): response.request.headers.update( {"Authorization": self.session.headers["Authorization"]} ) + if self.dt_client is not None: + self.dt_client.binary_config.update(token=self.access_token) log.debug(f"Retrying request with updated access token.") return self.session.send(response.request) From 48793152c9ee73839db0ef3efc2f43e632578914 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 09:46:07 +0000 Subject: [PATCH 05/60] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ansys/hps/client/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 993ec0e29..80a514e33 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -136,7 +136,7 @@ def __init__( all_fields=True, verify: 
Union[bool, str] = None, disable_security_warnings: bool = True, - data_transfer_url: str | None = None, + data_transfer_url: str | None = None, **kwargs, ): From 9850c5ce73f6f945ccb1a1db60a2b9ad072e864d Mon Sep 17 00:00:00 2001 From: Oliver Koenig Date: Sat, 17 Aug 2024 23:01:11 +0200 Subject: [PATCH 06/60] Update ProjectApi.copy_default_execution_script function to use new data transfer --- src/ansys/hps/client/jms/api/project_api.py | 46 +++++++++------------ 1 file changed, 19 insertions(+), 27 deletions(-) diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 75dab525f..75a38838c 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -29,7 +29,6 @@ from ansys.hps.data_transfer.client.models.msg import SrcDst, StoragePath from ansys.hps.data_transfer.client.models.ops import OperationState -import requests from ansys.hps.client.client import Client from ansys.hps.client.common import Object @@ -618,14 +617,25 @@ def copy_default_execution_script(self, filename: str) -> File: execution_script_default_bucket = info["settings"]["execution_script_default_bucket"] # server side copy of the file to project bucket - checksum = _fs_copy_file( - self.client.session, - self.fs_url, - execution_script_default_bucket, - filename, - self.project_id, - file.storage_id, - ) + self.client._start_dt_worker() + src = StoragePath(path=f"{execution_script_default_bucket}/{filename}") + dst = StoragePath(path=f"{self.project_id}/{file.storage_id}") + log.info(f"Copying default execution script {filename}") + op = self.client.dt_api.copy([SrcDst(src=src, dst=dst)]) + op = self.client.dt_api.wait_for(op.id)[0] + log.debug(f"Operation {op.state}") + if op.state != OperationState.Succeeded: + raise HPSError(f"Copying of default execution script {filename} failed") + + # get checksum of copied file + op = self.client.dt_api.get_metadata([dst]) + op = self.client.dt_api.wait_for(op.id)[0] + log.debug(f"Operation {op.state}") + if op.state != OperationState.Succeeded: + raise HPSError( + f"Retrieval of meta data of copied default execution script {filename} failed" + ) + checksum = op.result[dst.path]["checksum"] # update file resource file.hash = checksum @@ -896,21 +906,3 @@ def sync_jobs(project_api: ProjectApi, jobs: List[Job]): url = f"{project_api.url}/jobs:sync" # noqa: E231 json_data = json.dumps({"job_ids": [obj.id for obj in jobs]}) r = project_api.client.session.put(f"{url}", data=json_data) - - -def _fs_copy_file( - session: requests.Session, - fs_url: str, - source_bucket: str, - source_name: str, - destination_bucket: str, - destination_name: str, -) -> str: - """Copy files with the fs REST gateway.""" - json_data = json.dumps( - {"destination": f"ansfs://{destination_bucket}/{destination_name}"} # noqa: E231 - ) - r = session.post( - url=f"{fs_url}/{source_bucket}/{source_name}:copy", data=json_data # noqa: E231 - ) - return r.json()["checksum"] From d572c738ed5e91137d9bc4b723c6984dbfb20bb0 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 28 Aug 2024 11:37:23 +0530 Subject: [PATCH 07/60] updated code as per review observations --- src/ansys/hps/client/__version__.py | 6 ++- src/ansys/hps/client/client.py | 50 ++++++++++++++++++--- src/ansys/hps/client/jms/api/project_api.py | 4 ++ 3 files changed, 52 insertions(+), 8 deletions(-) diff --git a/src/ansys/hps/client/__version__.py b/src/ansys/hps/client/__version__.py index 130cba281..c48c4d39b 100644 --- 
a/src/ansys/hps/client/__version__.py +++ b/src/ansys/hps/client/__version__.py @@ -29,6 +29,10 @@ # major, minor, patch __version__ = importlib_metadata.version("ansys-hps-client") +__company__ = "Ansys Switzerland GmbH" + +__company_short__ = "Ansys" + # this is only a convenience to default the version # of Ansys simulation applications in PyHPS examples -__ansys_apps_version__ = "2024 R1" +__ansys_apps_version__ = "2024 R2" diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 80a514e33..8a8443fce 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -19,10 +19,14 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from ansys.hps.client.__version__ import __company_short__ + """Module providing the Python client to the HPS APIs.""" import atexit import logging +import os +import platform from typing import Union import warnings @@ -79,9 +83,6 @@ class Client(object): refresh_token : str, optional Refresh token. auth_url : str, optional - data_transfer_url: str - URL pointing to the data transfer API endpoint. - The default is ``'https://127.0.0.1:8443/hps/dt/api/v1'``. all_fields : bool, optional Whether to apply the ``fields="all"`` query parameter to all requests so that all available fields are returned for the requested resources. The @@ -136,7 +137,6 @@ def __init__( all_fields=True, verify: Union[bool, str] = None, disable_security_warnings: bool = True, - data_transfer_url: str | None = None, **kwargs, ): @@ -159,7 +159,7 @@ def __init__( self.client_id = client_id self.client_secret = client_secret self.verify = verify - self.data_transfer_url = data_transfer_url if data_transfer_url else url + "/dt/api/v1" + self.data_transfer_url = url + f"/dt/api/v1" self.dt_client = None if self.verify is None: @@ -237,8 +237,8 @@ def __init__( self._unauthorized_max_retry = 1 def exit_handler(): - log.info("Exiting gracefully.") if self.dt_client is not None: + log.info("Stopping the data transfer client gracefully.") self.dt_client.stop() atexit.register(exit_handler) @@ -255,7 +255,7 @@ def _start_dt_worker(self): if self.dt_client is None: log.info("Starting Data Transfer client.") # start Data transfer client - self.dt_client = DTClient() + self.dt_client = DTClient(download_dir=self._get_download_dir(__company_short__)) self.dt_client.binary_config.update( verbosity=3, @@ -269,6 +269,42 @@ def _start_dt_worker(self): self.dt_api = DataTransferApi(self.dt_client) self.dt_api.status(wait=True) + def _get_download_dir(self, company=None): + """ + Returns download directory platform dependent + + :Parameters: + -`company`: Company name of the software provider + + Resulting paths: + `Linux`: /home/user/.ansys/binaries + `Windows`: C:\\Users\\user\\AppData\\Local\\Ansys\\binaries + + Note that on Windows we use AppData\Local for this, + not AppData\Roaming, as the data stored for an application should typically be kept local. + + """ + + environment_variable = "HOME" + if platform.uname()[0].lower() == "windows": + environment_variable = "LOCALAPPDATA" + if platform.uname()[0].lower() == "darwin": + environment_variable = "LOCALAPPDATA" + path = os.environ.get(environment_variable, None) + + app_dir = "" + if company: + app_dir = os.path.join(app_dir, company) + + if app_dir: + if platform.uname()[0].lower() != "windows": + app_dir = "." 
+ app_dir.lower() + path = os.path.join(path, app_dir) + + path = os.path.join(path, "binaries") + + return path + def _auto_refresh_token(self, response, *args, **kwargs): """Automatically refreshes the access token and resends the request in case of an unauthorized error.""" diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 75a38838c..420e7c449 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -702,6 +702,7 @@ def _download_files(project_api: ProjectApi, files: List[File]): f.content = inp.read() else: log.error(f"Download of files failed") + raise HPSError(f"Download of files failed") # Delete temporary folder if os.path.exists(out_path): @@ -747,6 +748,8 @@ def _upload_files(project_api: ProjectApi, files): _fetch_file_metadata(project_api, files, dsts) else: log.error(f"Upload of files failed") + raise HPSError(f"Upload of files failed") + else: log.info("No files to upload") @@ -768,6 +771,7 @@ def _fetch_file_metadata( f.size = md["size"] else: log.error(f"Failed to fetch metadata of uploaded files") + raise HPSError(f"Failed to fetch metadata of uploaded files") def create_files(project_api: ProjectApi, files, as_objects=True) -> List[File]: From c40260341cbcff3442bc4d113d35f6956f14afa9 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 28 Aug 2024 11:42:29 +0530 Subject: [PATCH 08/60] updated code --- src/ansys/hps/client/client.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 8a8443fce..ac36e54d9 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -288,14 +288,11 @@ def _get_download_dir(self, company=None): environment_variable = "HOME" if platform.uname()[0].lower() == "windows": environment_variable = "LOCALAPPDATA" - if platform.uname()[0].lower() == "darwin": - environment_variable = "LOCALAPPDATA" path = os.environ.get(environment_variable, None) app_dir = "" if company: app_dir = os.path.join(app_dir, company) - if app_dir: if platform.uname()[0].lower() != "windows": app_dir = "." + app_dir.lower() From 396dcfcc873d1ecd6fcc8a807cda459105850c04 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 28 Aug 2024 12:08:23 +0530 Subject: [PATCH 09/60] updated code as per review observations --- src/ansys/hps/client/jms/api/project_api.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 420e7c449..6a88918da 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -25,7 +25,7 @@ import os import shutil from typing import Callable, List, Type, Union -from warnings import warn +import warnings from ansys.hps.data_transfer.client.models.msg import SrcDst, StoragePath from ansys.hps.data_transfer.client.models.ops import OperationState @@ -182,7 +182,7 @@ def download_file( self, file: File, target_path: str, - stream: bool = True, + stream: bool = None, progress_handler: Callable[[int], None] = None, ) -> str: """ @@ -191,7 +191,11 @@ def download_file( If ``stream=True``, data is retrieved in chunks, which avoids storing the entire content in memory. """ - return _download_file(self, file, target_path, progress_handler, stream) + if stream is not None: + msg = "The 'stream' input argument in ProjectApi.download_file() is deprecated. 
" + warnings.warn(msg, DeprecationWarning) + log.warning(msg) + return _download_file(self, file, target_path, progress_handler) ################################################################ # Parameter definitions @@ -816,7 +820,6 @@ def _download_file( file: File, target_path: str, progress_handler: Callable[[int], None] = None, - stream: bool = True, ) -> str: """Download a file.""" From c046296dd43eb6704ca467544337a53dbb9663a1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 28 Aug 2024 06:44:24 +0000 Subject: [PATCH 10/60] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ansys/hps/client/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index ee71afd62..135dbb8df 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -363,7 +363,6 @@ def auth_api_url(self) -> str: log.error("auth_api not valid for non-keycloak implementation") return None - def _auto_refresh_token(self, response, *args, **kwargs): """Automatically refreshes the access token and resends the request in case of an unauthorized error.""" From 8af823dc6a1ec68e75f1113fe39581f060e789d8 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 18:49:50 +0530 Subject: [PATCH 11/60] update secret --- .github/workflows/tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cc7cc3b5b..4113ef4fc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -33,6 +33,9 @@ jobs: python-version: ${{ inputs.python-version }} - name: Install dependencies + with: + secrets: | + GIT_AUTH_TOKEN=${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} run: | python -m pip install --upgrade pip setuptools tox tox-gh-actions mkdir docker-compose-artifact From 3d7353d2dc2a1a832c61e0e8346124cb73088652 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 19:03:55 +0530 Subject: [PATCH 12/60] updated file --- .github/workflows/tests.yml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4113ef4fc..8b47acf4b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -31,11 +31,15 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} + + - name: Create .netrc + run: | + printf "machine github.com\n\ + login pyansys-ci-bot\n\ + password ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }}\n"\ >> ./.netrc + chmod 600 ./.netrc - name: Install dependencies - with: - secrets: | - GIT_AUTH_TOKEN=${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} run: | python -m pip install --upgrade pip setuptools tox tox-gh-actions mkdir docker-compose-artifact From c34bc96085da41543c8cffc2e53b0858472e5931 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 19:15:51 +0530 Subject: [PATCH 13/60] updated file --- .github/workflows/tests.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8b47acf4b..7abb27728 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -31,13 +31,12 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} - - - name: Create .netrc - run: | - printf "machine github.com\n\ - login pyansys-ci-bot\n\ - password ${{ 
secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }}\n"\ >> ./.netrc - chmod 600 ./.netrc + + - uses: extractions/netrc@v2 + with: + machine: github.com + username: pyansys-ci-bot + password: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} - name: Install dependencies run: | From 7ecc0f365c68b6e372094737ab3c2ff1332d9f33 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 19:25:42 +0530 Subject: [PATCH 14/60] updated file --- .github/workflows/ci_cd.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index c3deb89a4..acd4f2366 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -69,6 +69,12 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - uses: extractions/netrc@v2 + with: + machine: github.com + username: pyansys-ci-bot + password: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} - name: Install dependencies run: | @@ -120,6 +126,12 @@ jobs: python-version: '3.12' steps: + - uses: extractions/netrc@v2 + with: + machine: github.com + username: pyansys-ci-bot + password: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} + - name: Build wheelhouse and perform smoke test uses: ansys/actions/build-wheelhouse@v7 with: @@ -146,6 +158,12 @@ jobs: steps: - name: "Install Git and clone project" uses: actions/checkout@v4 + + - uses: extractions/netrc@v2 + with: + machine: github.com + username: pyansys-ci-bot + password: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} - name: "Set up Python ${{ env.MAIN_PYTHON_VERSION }}" uses: ansys/actions/_setup-python@main From 9b72db6a6247056db1ad651a1b0b96d175712c26 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 19:41:14 +0530 Subject: [PATCH 15/60] updated conftest client() --- tests/conftest.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ebd6f3a19..593362fba 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,8 +55,21 @@ def keycloak_password(): @pytest.fixture(scope="session") -def client(url, username, password): - return Client(url, username, password, verify=False) +def client(binary_config, binary_dir): + from ansys.hps.data_transfer.client import Client + + c = Client(bin_config=binary_config, download_dir=binary_dir, clean_dev=False) + c.start() + yield c + + from ansys.hps.data_transfer.client import DataTransferApi + from ansys.hps.data_transfer.client.models.msg import StoragePath + + api = DataTransferApi(c) + op = api.rmdir([StoragePath(path="python_client_tests")]) + api.wait_for(op.id) + + c.stop() @pytest.fixture() From 9e3d28b73f93e79f2e4c6e9903d91b3f71acf3b1 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 20:10:52 +0530 Subject: [PATCH 16/60] reverted change --- tests/conftest.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 593362fba..ebd6f3a19 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,21 +55,8 @@ def keycloak_password(): @pytest.fixture(scope="session") -def client(binary_config, binary_dir): - from ansys.hps.data_transfer.client import Client - - c = Client(bin_config=binary_config, download_dir=binary_dir, clean_dev=False) - c.start() - yield c - - from ansys.hps.data_transfer.client import DataTransferApi - from ansys.hps.data_transfer.client.models.msg import StoragePath - - api = DataTransferApi(c) - op = api.rmdir([StoragePath(path="python_client_tests")]) - api.wait_for(op.id) 
- - c.stop() +def client(url, username, password): + return Client(url, username, password, verify=False) @pytest.fixture() From 8215c7d29b73d4a485b7716570edaaa303ab0c7a Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Mon, 2 Sep 2024 20:33:33 +0530 Subject: [PATCH 17/60] Update action.yml --- .github/actions/hps_services/action.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 2108f434d..89313af4a 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -83,8 +83,8 @@ runs: ls -la tar -xvzf docker-compose-internal.tar.gz cd docker-compose - FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev docker-compose pull - FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev docker-compose up -d + FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose pull + FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose --profile=data-transfer up -d working-directory: ./docker-compose-artifact - name: Wait for services @@ -100,4 +100,4 @@ runs: run: | echo "url=https://localhost:8443/hps" >> $GITHUB_OUTPUT echo "path=$(pwd)" >> $GITHUB_OUTPUT - working-directory: ./docker-compose-artifact/docker-compose \ No newline at end of file + working-directory: ./docker-compose-artifact/docker-compose From f19fd6508cd9ccb6da5b28d93d7f568b1b6cc8ed Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Mon, 2 Sep 2024 20:37:32 +0530 Subject: [PATCH 18/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 89313af4a..d355d252d 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -90,7 +90,7 @@ runs: - name: Wait for services shell: bash run: | - curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/jms/api/v1 + curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 10 https://localhost:8443/hps/jms/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/fs/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/rms/api/v1 From 2aa226df944d29563ebc70d0ceb9f515920f7755 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Mon, 2 Sep 2024 20:45:02 +0530 Subject: [PATCH 19/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index d355d252d..e5f75a71c 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -84,7 +84,7 @@ runs: tar -xvzf docker-compose-internal.tar.gz cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose pull - FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose --profile=data-transfer up -d + FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d working-directory: ./docker-compose-artifact - name: Wait for services From 83fbde124fb325646f83f46106262e198ed414d4 Mon Sep 17 00:00:00 2001 From: RajaPiseD 
<115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:24:04 +0530 Subject: [PATCH 20/60] Update action.yml --- .github/actions/hps_services/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index e5f75a71c..d82047b92 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -83,8 +83,8 @@ runs: ls -la tar -xvzf docker-compose-internal.tar.gz cd docker-compose - FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose pull - FSGATEWAY_TAG=latest-dev JMS_TAG=latest-dev DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d + FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull + FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d working-directory: ./docker-compose-artifact - name: Wait for services From c02e56d0883e845525d8ce741a933d343148ef78 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:13:36 +0530 Subject: [PATCH 21/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index d82047b92..2fc6c4b7e 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -90,7 +90,7 @@ runs: - name: Wait for services shell: bash run: | - curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 10 https://localhost:8443/hps/jms/api/v1 + curl -k --head -X GET --retry 30 --retry-all-errors --retry-delay 5 https://localhost:8443/hps/jms/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/fs/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/rms/api/v1 From f2dba9e02aaec74d14475f870507a424f4e00e2c Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:46:25 +0530 Subject: [PATCH 22/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 2fc6c4b7e..bd002b1c1 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -49,7 +49,7 @@ runs: with: file: 'docker-compose-internal.tar.gz' target: 'docker-compose-artifact/docker-compose-internal.tar.gz' - repo: ansys-internal/rep-deployments + repo: ansys-internal/rep-deployments@mpawlik/dt_integration version: tags/latest-dev token: ${{ inputs.token }} From 10e4413b86e899f4b16d79b89cf652a5fad98e5c Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:56:42 +0530 Subject: [PATCH 23/60] Update ci_cd.yml --- .github/workflows/ci_cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index acd4f2366..e80c79713 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -138,6 +138,7 @@ jobs: library-name: ${{ env.PACKAGE_NAME }} operating-system: ${{ matrix.os }} python-version: ${{ matrix.python-version }} + check-licenses: false package: name: Package library From 
73439fadb12c02e19026ee448806cf0cfb2dd12f Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:49:20 +0530 Subject: [PATCH 24/60] Update action.yml --- .github/actions/hps_services/action.yml | 29 +++++++++++++++++-------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index bd002b1c1..04d51c2f4 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -43,15 +43,26 @@ runs: version: tags/${{ inputs.version }} token: ${{ inputs.token }} - - if: ${{ inputs.version == 'latest-dev' }} - uses: dsaltares/fetch-gh-release-asset@master - name: Download Internal Services Artifact - with: - file: 'docker-compose-internal.tar.gz' - target: 'docker-compose-artifact/docker-compose-internal.tar.gz' - repo: ansys-internal/rep-deployments@mpawlik/dt_integration - version: tags/latest-dev - token: ${{ inputs.token }} +# - if: ${{ inputs.version == 'latest-dev' }} +# uses: dsaltares/fetch-gh-release-asset@master +# name: Download Internal Services Artifact +# with: +# file: 'docker-compose-internal.tar.gz' +# target: 'docker-compose-artifact/docker-compose-internal.tar.gz' +# repo: ansys-internal/rep-deployments@mpawlik/dt_integration +# version: tags/latest-dev +# token: ${{ inputs.token }} + - if: ${{ inputs.version == 'latest-dev' }} + uses: dawidd6/action-download-artifact@v3 + with: + workflow: action.yaml + name: Download Internal Services Artifact + #path: test_setup/ + branch: dt_integration + repo: ansys-internal/rep-deployments + workflow_conclusion: success + search_artifacts: false + github_token: ${{secrets.PYANSYS_CI_BOT_TOKEN}} - uses: KengoTODA/actions-setup-docker-compose@main env: From 1fa853e5971c614499051f17d7007c09ebd6f8ed Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:52:09 +0530 Subject: [PATCH 25/60] Update action.yml --- .github/actions/hps_services/action.yml | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 04d51c2f4..4fc7d764a 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -52,17 +52,16 @@ runs: # repo: ansys-internal/rep-deployments@mpawlik/dt_integration # version: tags/latest-dev # token: ${{ inputs.token }} - - if: ${{ inputs.version == 'latest-dev' }} - uses: dawidd6/action-download-artifact@v3 - with: - workflow: action.yaml - name: Download Internal Services Artifact + - uses: dawidd6/action-download-artifact@v3 + with: + workflow: action.yaml + name: Download Internal Services Artifact #path: test_setup/ - branch: dt_integration - repo: ansys-internal/rep-deployments - workflow_conclusion: success - search_artifacts: false - github_token: ${{secrets.PYANSYS_CI_BOT_TOKEN}} + branch: dt_integration + repo: ansys-internal/rep-deployments + workflow_conclusion: success + search_artifacts: false + github_token: ${{secrets.PYANSYS_CI_BOT_TOKEN}} - uses: KengoTODA/actions-setup-docker-compose@main env: From 15715540d7a66eed4a2243049c5ec4da940c4a4f Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:54:54 +0530 Subject: [PATCH 26/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 4fc7d764a..508fe1694 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -61,7 +61,7 @@ runs: repo: ansys-internal/rep-deployments workflow_conclusion: success search_artifacts: false - github_token: ${{secrets.PYANSYS_CI_BOT_TOKEN}} + github_token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} - uses: KengoTODA/actions-setup-docker-compose@main env: From 202000de214d5535896d8411b9443bebcd73c2ac Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:00:03 +0530 Subject: [PATCH 27/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 508fe1694..105029a62 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -61,7 +61,7 @@ runs: repo: ansys-internal/rep-deployments workflow_conclusion: success search_artifacts: false - github_token: ${{ secrets.PYANSYS_CI_BOT_TOKEN }} + github_token: ${{ inputs.token }} - uses: KengoTODA/actions-setup-docker-compose@main env: From af0b87d56c45a245c0e6c5a7cd41a1948c62b9cd Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:06:15 +0530 Subject: [PATCH 28/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 105029a62..831e4b6b6 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -57,7 +57,7 @@ runs: workflow: action.yaml name: Download Internal Services Artifact #path: test_setup/ - branch: dt_integration + branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments workflow_conclusion: success search_artifacts: false From 6af25a6beec397452b90648424f8368b66bc7071 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:14:48 +0530 Subject: [PATCH 29/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 831e4b6b6..04ef86f45 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -55,7 +55,7 @@ runs: - uses: dawidd6/action-download-artifact@v3 with: workflow: action.yaml - name: Download Internal Services Artifact + name: docker-compose-internal #path: test_setup/ branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments From 70a709e0e2d47cab5ca729d166d84fc41b25c23f Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:16:02 +0530 Subject: [PATCH 30/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 04ef86f45..74d3ba259 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -54,7 +54,7 @@ runs: # token: ${{ inputs.token }} - uses: dawidd6/action-download-artifact@v3 with: - workflow: action.yaml + workflow: main.yaml name: docker-compose-internal 
#path: test_setup/ branch: mpawlik/dt_integration From a23a0a7542cc0f101859fc13590411d60c7322ee Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:19:22 +0530 Subject: [PATCH 31/60] Update action.yml --- .github/actions/hps_services/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 74d3ba259..3279ced38 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -56,7 +56,7 @@ runs: with: workflow: main.yaml name: docker-compose-internal - #path: test_setup/ + path: test_setup/ branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments workflow_conclusion: success @@ -91,6 +91,7 @@ runs: shell: bash run: | ls -la + cd test_setup/ tar -xvzf docker-compose-internal.tar.gz cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull From f6c845372792e2068bce1048f41ce076019971e1 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:24:17 +0530 Subject: [PATCH 32/60] Update action.yml --- .github/actions/hps_services/action.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 3279ced38..42146661a 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -56,7 +56,7 @@ runs: with: workflow: main.yaml name: docker-compose-internal - path: test_setup/ + #path: test_setup/ branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments workflow_conclusion: success @@ -88,10 +88,10 @@ runs: - name: Start services (internal package) if: ${{ inputs.version == 'latest-dev' }} - shell: bash + #shell: bash run: | ls -la - cd test_setup/ + #cd test_setup/ tar -xvzf docker-compose-internal.tar.gz cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull From ea2f033f36a1a5b865d48e18386e1fe1122d7cce Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:27:33 +0530 Subject: [PATCH 33/60] Update action.yml --- .github/actions/hps_services/action.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 42146661a..10c3027b0 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -56,7 +56,6 @@ runs: with: workflow: main.yaml name: docker-compose-internal - #path: test_setup/ branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments workflow_conclusion: success @@ -88,10 +87,9 @@ runs: - name: Start services (internal package) if: ${{ inputs.version == 'latest-dev' }} - #shell: bash + shell: bash run: | ls -la - #cd test_setup/ tar -xvzf docker-compose-internal.tar.gz cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull From 20affa6618eca94019f170dc16adaccd7dc0d083 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:12:30 +0530 Subject: [PATCH 34/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml 
b/.github/actions/hps_services/action.yml index 10c3027b0..786236a5f 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -58,7 +58,7 @@ runs: name: docker-compose-internal branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments - workflow_conclusion: success + workflow_conclusion: neutral search_artifacts: false github_token: ${{ inputs.token }} From 5076ce2a3efe4b13ad36c403a8a9a09a47853ba3 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:15:13 +0530 Subject: [PATCH 35/60] Update action.yml --- .github/actions/hps_services/action.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 786236a5f..25becba16 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -62,6 +62,14 @@ runs: search_artifacts: false github_token: ${{ inputs.token }} + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + with: + ## limits ssh access and adds the ssh public key for the user which triggered the workflow + limit-access-to-actor: true + ## limits ssh access and adds the ssh public keys of the listed GitHub users + # limit-access-to-users: [username] + - uses: KengoTODA/actions-setup-docker-compose@main env: GITHUB_TOKEN: ${{ inputs.token }} From 18248c88bff61a166558e7ff329f156bcac96d6e Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:16:46 +0530 Subject: [PATCH 36/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 25becba16..296e06ea5 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -58,7 +58,7 @@ runs: name: docker-compose-internal branch: mpawlik/dt_integration repo: ansys-internal/rep-deployments - workflow_conclusion: neutral + workflow_conclusion: success search_artifacts: false github_token: ${{ inputs.token }} From bd3a08b07b63abceb04d68c8a961e33208a6a3d8 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:22:27 +0530 Subject: [PATCH 37/60] Update action.yml --- .github/actions/hps_services/action.yml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 296e06ea5..f5acdeea9 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -62,14 +62,6 @@ runs: search_artifacts: false github_token: ${{ inputs.token }} - - name: Setup tmate session - uses: mxschmitt/action-tmate@v3 - with: - ## limits ssh access and adds the ssh public key for the user which triggered the workflow - limit-access-to-actor: true - ## limits ssh access and adds the ssh public keys of the listed GitHub users - # limit-access-to-users: [username] - - uses: KengoTODA/actions-setup-docker-compose@main env: GITHUB_TOKEN: ${{ inputs.token }} @@ -85,6 +77,7 @@ runs: if: ${{ inputs.version != 'latest-dev' }} shell: bash run: | + pwd ls -la tar -xvzf docker-compose-customer.tar.gz mv docker-compose-customer docker-compose From d0de340fbad172c57cec847e491a08988e2978a5 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 
17:24:58 +0530 Subject: [PATCH 38/60] Update action.yml --- .github/actions/hps_services/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index f5acdeea9..c0f19ebb7 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -77,7 +77,6 @@ runs: if: ${{ inputs.version != 'latest-dev' }} shell: bash run: | - pwd ls -la tar -xvzf docker-compose-customer.tar.gz mv docker-compose-customer docker-compose @@ -90,6 +89,8 @@ runs: if: ${{ inputs.version == 'latest-dev' }} shell: bash run: | + echo "$(pwd)" + pwd ls -la tar -xvzf docker-compose-internal.tar.gz cd docker-compose From f64bb465338ed08009cbae89537cb9c17b1be9f8 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:27:44 +0530 Subject: [PATCH 39/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index c0f19ebb7..77258df18 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -83,7 +83,7 @@ runs: cd docker-compose docker-compose build docker-compose up -d - working-directory: ./docker-compose-artifact + working-directory: ./ - name: Start services (internal package) if: ${{ inputs.version == 'latest-dev' }} From df6ce1d5b26a820880fd3dc468b77007327eb02f Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:29:47 +0530 Subject: [PATCH 40/60] Update action.yml --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 77258df18..eda32ef89 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -96,7 +96,7 @@ runs: cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d - working-directory: ./docker-compose-artifact + #working-directory: ./docker-compose-artifact - name: Wait for services shell: bash From d69578dade463fa2898641db0a51e0779118bdb4 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:33:27 +0530 Subject: [PATCH 41/60] Update action.yml --- .github/actions/hps_services/action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index eda32ef89..182e08eb1 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -60,6 +60,7 @@ runs: repo: ansys-internal/rep-deployments workflow_conclusion: success search_artifacts: false + path: ./docker-compose-artifact github_token: ${{ inputs.token }} - uses: KengoTODA/actions-setup-docker-compose@main @@ -96,7 +97,7 @@ runs: cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d - #working-directory: ./docker-compose-artifact + working-directory: 
./docker-compose-artifact - name: Wait for services shell: bash From 6aa40671a598039cb63b7e9d0b65885e7bd56c46 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Mon, 2 Sep 2024 20:32:08 +0530 Subject: [PATCH 42/60] updated files --- src/ansys/hps/client/client.py | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 135dbb8df..3541e235b 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -301,21 +301,25 @@ def rep_url(self) -> str: def _start_dt_worker(self): if self.dt_client is None: - log.info("Starting Data Transfer client.") - # start Data transfer client - self.dt_client = DTClient(download_dir=self._get_download_dir(__company_short__)) - - self.dt_client.binary_config.update( - verbosity=3, - debug=False, - insecure=True, - token=self.access_token, - data_transfer_url=self.data_transfer_url, - ) - self.dt_client.start() + try: + log.info("Starting Data Transfer client.") + # start Data transfer client + self.dt_client = DTClient(download_dir=self._get_download_dir(__company_short__)) + + self.dt_client.binary_config.update( + verbosity=3, + debug=False, + insecure=True, + token=self.access_token, + data_transfer_url=self.data_transfer_url, + ) + self.dt_client.start() - self.dt_api = DataTransferApi(self.dt_client) - self.dt_api.status(wait=True) + self.dt_api = DataTransferApi(self.dt_client) + self.dt_api.status(wait=True) + except Exception as ex: + log.debug(ex) + raise HPSError("Error occurred when starting Data Transfer client.") def _get_download_dir(self, company=None): """ From 670d07a7e5f7ed90ca8601528b326783c0017279 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Tue, 3 Sep 2024 20:04:12 +0530 Subject: [PATCH 43/60] support for archieve upload and download --- src/ansys/hps/client/jms/api/jms_api.py | 108 +++++++++++--------- src/ansys/hps/client/jms/api/project_api.py | 32 ++++-- tests/jms/test_jms_api.py | 19 +--- tests/test_services.py | 12 +-- 4 files changed, 90 insertions(+), 81 deletions(-) diff --git a/src/ansys/hps/client/jms/api/jms_api.py b/src/ansys/hps/client/jms/api/jms_api.py index 4691aa421..f1e10874c 100644 --- a/src/ansys/hps/client/jms/api/jms_api.py +++ b/src/ansys/hps/client/jms/api/jms_api.py @@ -19,6 +19,9 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from ansys.hps.data_transfer.client.models.msg import SrcDst, StoragePath +from ansys.hps.data_transfer.client.models.ops import OperationState + """Module wrapping around the JMS root endpoints.""" import json import logging @@ -27,7 +30,6 @@ import uuid import backoff -import requests from ansys.hps.client.client import Client from ansys.hps.client.common import Object @@ -74,12 +76,12 @@ def url(self) -> str: """URL of the API.""" return f"{self.client.url}/jms/api/v1" - @property - def fs_url(self) -> str: - """URL of the file storage gateway.""" - if self._fs_url is None: - self._fs_url = _find_available_fs_url(self.get_storage()) - return self._fs_url + # @property + # def fs_url(self) -> str: + # """URL of the file storage gateway.""" + # if self._fs_url is None: + # self._fs_url = _find_available_fs_url(self.get_storage()) + # return self._fs_url def get_api_info(self): """Get information of the JMS API that the client is connected to. 
@@ -414,23 +416,16 @@ def _restore_project(jms_api, archive_path): if not os.path.exists(archive_path): raise HPSError(f"Project archive: path does not exist {archive_path}") - # Upload archive to FS API - archive_name = os.path.basename(archive_path) - bucket = f"hps-client-restore-{uuid.uuid4()}" - fs_file_url = f"{jms_api.client.url}/fs/api/v1/{bucket}/{archive_name}" - ansfs_file_url = f"ansfs://{bucket}/{archive_name}" # noqa: E231 - fs_headers = {"content-type": "application/octet-stream"} + log.info(f"Uploading archive {archive_path}") - log.info(f"Uploading archive to {fs_file_url}") - with open(archive_path, "rb") as file_content: - r = jms_api.client.session.post(fs_file_url, data=file_content, headers=fs_headers) + _upload_archive(jms_api, archive_path, bucket) # POST restore request - log.info(f"Restoring archive from {ansfs_file_url}") + log.info(f"Restoring archive {archive_path}") url = f"{jms_api.url}/projects/archive" - query_params = {"backend_path": ansfs_file_url} + query_params = {"backend_path": f"{bucket}/{os.path.basename(archive_path)}"} r = jms_api.client.session.post(url, params=query_params) # Monitor restore operation @@ -449,44 +444,65 @@ def _restore_project(jms_api, archive_path): # Delete archive file on server log.info(f"Delete temporary bucket {bucket}") - r = jms_api.client.session.put(f"{jms_api.client.url}/fs/api/v1/remove/{bucket}") + op = jms_api.client.dt_api.rmdir([StoragePath(path=bucket)]) + op = jms_api.client.dt_api.wait_for([op.id]) + if op[0].state != OperationState.Succeeded: + raise HPSError(f"Delete temporary bucket {bucket} failed") return get_project(jms_api.client, jms_api.url, project_id) -def _get_storages(client: Client, api_url: str) -> List[Dict]: +def _upload_archive(jms_api: JmsApi, archive_path, bucket): """ - Get a list of storages. + Uploads archive using data transfer worker. + """ - url = f"{api_url}/storage" - r = client.session.get(url) - return r.json()["backends"] + jms_api.client._start_dt_worker() + src = StoragePath(path=archive_path, remote="local") + dst = StoragePath(path=f"{bucket}/{os.path.basename(archive_path)}") -def _find_available_fs_url(file_storages: Dict) -> str: - """Find first available file storage URL.""" + op = jms_api.client.dt_api.copy([SrcDst(src=src, dst=dst)]) + op = jms_api.client.dt_api.wait_for(op.id) - if not file_storages: - raise HPSError("There is no file storage information.") + log.info(f"Operation {op[0].state}") + if op[0].state != OperationState.Succeeded: + raise HPSError(f"Upload of archive {archive_path} failed") - rest_gateways = [fs for fs in file_storages if fs["obj_type"] == "RestGateway"] - rest_gateways.sort(key=lambda fs: fs["priority"]) - if not rest_gateways: - raise HPSError("There is no file storage gateway defined.") +def _get_storages(client: Client, api_url: str) -> List[Dict]: + """ + Get a list of storages. + """ + url = f"{api_url}/storage" + r = client.session.get(url) + return r.json()["backends"] - for d in rest_gateways: - url = d["url"] - try: - r = requests.get(url, verify=False, timeout=2) - is_ansft = r.json()["ansft"] - except Exception as ex: - log.debug(ex) - continue - if r.status_code == 200 and is_ansft: - return url - raise HPSError( - f"All defined file storage gateways are unavailable" - f" ({', '.join([d['url'] for d in rest_gateways])})." 
- ) +# def _find_available_fs_url(file_storages: Dict) -> str: +# """Find first available file storage URL.""" +# +# if not file_storages: +# raise HPSError("There is no file storage information.") +# +# rest_gateways = [fs for fs in file_storages if fs["obj_type"] == "RestGateway"] +# rest_gateways.sort(key=lambda fs: fs["priority"]) +# +# if not rest_gateways: +# raise HPSError("There is no file storage gateway defined.") +# +# for d in rest_gateways: +# url = d["url"] +# try: +# r = requests.get(url, verify=False, timeout=2) +# is_ansft = r.json()["ansft"] +# except Exception as ex: +# log.debug(ex) +# continue +# if r.status_code == 200 and is_ansft: +# return url +# +# raise HPSError( +# f"All defined file storage gateways are unavailable" +# f" ({', '.join([d['url'] for d in rest_gateways])})." +# ) diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 6a88918da..afa920620 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -737,10 +737,10 @@ def _upload_files(project_api: ProjectApi, files): for f in files: if getattr(f, "src", None) is None: continue - is_file = isinstance(f.src, str) and os.path.exists(f.src) - if is_file: - srcs.append(StoragePath(path=f.src, remote="local")) - dsts.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) + # is_file = isinstance(f.src, str) and os.path.exists(f.src) + # if is_file: + srcs.append(StoragePath(path=f.src, remote="local")) + dsts.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) if len(srcs) > 0: log.info(f"Uploading files") op = project_api.client.dt_api.copy( @@ -768,7 +768,7 @@ def _fetch_file_metadata( if op.state == OperationState.Succeeded: base_dir = project_api.project_id for f in files: - if getattr(f, "src", None) is None: + if getattr(f, "src", None) is None or isinstance(f.src, str) == False: continue md = op.result[f"{base_dir}/{os.path.basename(f.storage_id)}"] f.hash = md["checksum"] @@ -882,7 +882,7 @@ def archive_project(project_api: ProjectApi, target_path, include_job_files=True download_link = op.result["backend_path"] # Download archive - download_link = download_link.replace("ansfs://", project_api.fs_url + "/") + # download_link = download_link.replace("ansfs://", project_api.fs_url + "/") log.info(f"Project archive download link: {download_link}") if not os.path.isdir(target_path): @@ -891,16 +891,26 @@ def archive_project(project_api: ProjectApi, target_path, include_job_files=True file_path = os.path.join(target_path, download_link.rsplit("/")[-1]) log.info(f"Download archive to {file_path}") - with project_api.client.session.get(download_link, stream=True) as r: - with open(file_path, "wb") as f: - for chunk in r.iter_content(chunk_size=1024 * 1024): - if chunk: - f.write(chunk) + _download_archive(project_api, download_link, file_path) log.info(f"Done saving project archive to disk.") return file_path +def _download_archive(project_api: ProjectApi, download_link, target_path): + project_api.client._start_dt_worker() + + src = StoragePath(path=f"{download_link}") + dst = StoragePath(path=target_path, remote="local") + op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)]) + op = project_api.client.dt_api.wait_for([op.id]) + + log.info(f"Operation {op[0].state}") + + if op[0].state != OperationState.Succeeded: + raise HPSError(f"Download of archive {download_link} failed") + + def copy_jobs(project_api: ProjectApi, jobs: List[Job], as_objects=True, 
**query_params): """Create jobs by copying existing jobs.""" diff --git a/tests/jms/test_jms_api.py b/tests/jms/test_jms_api.py index 4647fe6ed..20a9593c6 100644 --- a/tests/jms/test_jms_api.py +++ b/tests/jms/test_jms_api.py @@ -26,9 +26,8 @@ from marshmallow.utils import missing import pytest -from ansys.hps.client import Client, ClientError, HPSError +from ansys.hps.client import Client, ClientError from ansys.hps.client.jms import JmsApi, ProjectApi -from ansys.hps.client.jms.api.jms_api import _find_available_fs_url from ansys.hps.client.jms.resource import ( FloatParameterDefinition, IntParameterDefinition, @@ -45,7 +44,6 @@ def test_jms_api_info(client): jms_api = JmsApi(client) assert jms_api.url.endswith("/jms/api/v1") - assert jms_api.fs_url.endswith("/fs/api/v1") info = jms_api.get_api_info() assert "services" in info @@ -53,21 +51,6 @@ def test_jms_api_info(client): assert "settings" in info assert "time" in info - -def test_unavailable_fs_url(client): - - storage_config = JmsApi(client).get_storage() - - for config in storage_config: - if config["obj_type"] == "RestGateway": - config["url"] = config["url"].replace("v1", "v234") - - with pytest.raises(HPSError) as ex_info: - _find_available_fs_url(storage_config) - - assert "unavailable" in str(ex_info.value) - - def test_jms_api(client): log.debug("=== Client ===") diff --git a/tests/test_services.py b/tests/test_services.py index e8ec5e78d..a6201d70d 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -47,11 +47,11 @@ def test_services(client: Client, build_info_path: str): assert "execution_script_bucket" in jms_info["settings"] assert "execution_script_default_bucket" in jms_info["settings"] - # check file storage api - r = client.session.get(f"{client.rep_url}/fs/api/v1") - fs_info = r.json() - log.info(f"FS api info\n{json.dumps(fs_info, indent=2)}") - assert "build" in fs_info + # check dts api + r = client.session.get(f"{client.rep_url}/dt/api/v1") + dt_info = r.json() + log.info(f"Dt api info\n{json.dumps(dt_info, indent=2)}") + assert "build_info" in dt_info # check rms api rms_api = RmsApi(client) @@ -60,7 +60,7 @@ def test_services(client: Client, build_info_path: str): assert "build" in rms_info assert "version" in rms_info["build"] - info = {"jms": jms_info, "fs": fs_info, "rms": rms_info} + info = {"jms": jms_info, "dt": dt_info, "rms": rms_info} with open(build_info_path, "w") as f: f.write(json.dumps(info, indent=2)) From 0c21fce65f2659e5e27983a706280d59e4a99870 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:36:13 +0000 Subject: [PATCH 44/60] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/jms/test_jms_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/jms/test_jms_api.py b/tests/jms/test_jms_api.py index 20a9593c6..61228edac 100644 --- a/tests/jms/test_jms_api.py +++ b/tests/jms/test_jms_api.py @@ -51,6 +51,7 @@ def test_jms_api_info(client): assert "settings" in info assert "time" in info + def test_jms_api(client): log.debug("=== Client ===") From 3a703bab19076f772f69c633b7ec409291bb04f2 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 4 Sep 2024 10:40:53 +0530 Subject: [PATCH 45/60] handle file input as stream io --- src/ansys/hps/client/jms/api/project_api.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/src/ansys/hps/client/jms/api/project_api.py 
b/src/ansys/hps/client/jms/api/project_api.py index afa920620..2f66837cb 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -20,6 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Module exposing the project endpoints of the JMS.""" +import io import json import logging import os @@ -732,14 +733,21 @@ def _upload_files(project_api: ProjectApi, files): project_api.client._start_dt_worker() srcs = [] dsts = [] + tempFiles = [] base_dir = project_api.project_id + filePath = "" for f in files: if getattr(f, "src", None) is None: continue - # is_file = isinstance(f.src, str) and os.path.exists(f.src) - # if is_file: - srcs.append(StoragePath(path=f.src, remote="local")) + filePath = f.src + if isinstance(f.src, io.IOBase): + tempFiles.append(f.storage_id) + mode = "wb" if isinstance(f.src, io.BytesIO) else "w" + with open(f.storage_id, mode) as out: + out.write(f.src.getvalue()) + filePath = f.storage_id + srcs.append(StoragePath(path=filePath, remote="local")) dsts.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) if len(srcs) > 0: log.info(f"Uploading files") @@ -754,6 +762,11 @@ def _upload_files(project_api: ProjectApi, files): log.error(f"Upload of files failed") raise HPSError(f"Upload of files failed") + if len(tempFiles) > 0: + for file in tempFiles: + if os.path.exists(file): + os.remove(file) + else: log.info("No files to upload") @@ -768,7 +781,7 @@ def _fetch_file_metadata( if op.state == OperationState.Succeeded: base_dir = project_api.project_id for f in files: - if getattr(f, "src", None) is None or isinstance(f.src, str) == False: + if getattr(f, "src", None) is None: continue md = op.result[f"{base_dir}/{os.path.basename(f.storage_id)}"] f.hash = md["checksum"] From 688bd4208c44b43692f0961f08a925ff7d2ca038 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 4 Sep 2024 10:42:14 +0530 Subject: [PATCH 46/60] updated action yaml --- .github/actions/hps_services/action.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 182e08eb1..319e11881 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -84,7 +84,7 @@ runs: cd docker-compose docker-compose build docker-compose up -d - working-directory: ./ + working-directory: ./docker-compose-artifact - name: Start services (internal package) if: ${{ inputs.version == 'latest-dev' }} @@ -102,7 +102,7 @@ runs: - name: Wait for services shell: bash run: | - curl -k --head -X GET --retry 30 --retry-all-errors --retry-delay 5 https://localhost:8443/hps/jms/api/v1 + curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 5 https://localhost:8443/hps/jms/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/fs/api/v1 curl -k --head -X GET --retry 30 --retry-connrefused --retry-delay 1 https://localhost:8443/hps/rms/api/v1 @@ -112,4 +112,4 @@ runs: run: | echo "url=https://localhost:8443/hps" >> $GITHUB_OUTPUT echo "path=$(pwd)" >> $GITHUB_OUTPUT - working-directory: ./docker-compose-artifact/docker-compose + working-directory: ./docker-compose-artifact/docker-compose \ No newline at end of file From e5c275bd8c377a2d463c782d62b46726d929defa Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 4 Sep 2024 18:21:58 +0530 Subject: [PATCH 47/60] updated code as per review observations --- 
src/ansys/hps/client/__version__.py | 4 --- src/ansys/hps/client/client.py | 9 ++---- src/ansys/hps/client/jms/api/jms_api.py | 36 --------------------- src/ansys/hps/client/jms/api/project_api.py | 31 +++++++++--------- 4 files changed, 19 insertions(+), 61 deletions(-) diff --git a/src/ansys/hps/client/__version__.py b/src/ansys/hps/client/__version__.py index c48c4d39b..45d3f38be 100644 --- a/src/ansys/hps/client/__version__.py +++ b/src/ansys/hps/client/__version__.py @@ -29,10 +29,6 @@ # major, minor, patch __version__ = importlib_metadata.version("ansys-hps-client") -__company__ = "Ansys Switzerland GmbH" - -__company_short__ = "Ansys" - # this is only a convenience to default the version # of Ansys simulation applications in PyHPS examples __ansys_apps_version__ = "2024 R2" diff --git a/src/ansys/hps/client/client.py b/src/ansys/hps/client/client.py index 3541e235b..ce00d3a80 100644 --- a/src/ansys/hps/client/client.py +++ b/src/ansys/hps/client/client.py @@ -19,7 +19,6 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -from ansys.hps.client.__version__ import __company_short__ """Module providing the Python client to the HPS APIs.""" @@ -105,8 +104,7 @@ class Client(object): >>> cl = Client( ... url="https://localhost:8443/hps", ... username="repuser", - ... password="repuser", - dts_url="https://localhost:8443/hps/dt/api/v1" + ... password="repuser" ... ) Create a client object and connect to HPS with a refresh token. @@ -114,8 +112,7 @@ class Client(object): >>> cl = Client( ... url="https://localhost:8443/hps", ... username="repuser", - ... refresh_token="eyJhbGciOiJIUzI1NiIsInR5cC...", - dts_url="https://localhost:8443/hps/dt/api/v1" + ... refresh_token="eyJhbGciOiJIUzI1NiIsInR5cC..." >>> ) """ @@ -304,7 +301,7 @@ def _start_dt_worker(self): try: log.info("Starting Data Transfer client.") # start Data transfer client - self.dt_client = DTClient(download_dir=self._get_download_dir(__company_short__)) + self.dt_client = DTClient(download_dir=self._get_download_dir("Ansys")) self.dt_client.binary_config.update( verbosity=3, diff --git a/src/ansys/hps/client/jms/api/jms_api.py b/src/ansys/hps/client/jms/api/jms_api.py index f1e10874c..831f39ff8 100644 --- a/src/ansys/hps/client/jms/api/jms_api.py +++ b/src/ansys/hps/client/jms/api/jms_api.py @@ -76,13 +76,6 @@ def url(self) -> str: """URL of the API.""" return f"{self.client.url}/jms/api/v1" - # @property - # def fs_url(self) -> str: - # """URL of the file storage gateway.""" - # if self._fs_url is None: - # self._fs_url = _find_available_fs_url(self.get_storage()) - # return self._fs_url - def get_api_info(self): """Get information of the JMS API that the client is connected to. 
@@ -477,32 +470,3 @@ def _get_storages(client: Client, api_url: str) -> List[Dict]: url = f"{api_url}/storage" r = client.session.get(url) return r.json()["backends"] - - -# def _find_available_fs_url(file_storages: Dict) -> str: -# """Find first available file storage URL.""" -# -# if not file_storages: -# raise HPSError("There is no file storage information.") -# -# rest_gateways = [fs for fs in file_storages if fs["obj_type"] == "RestGateway"] -# rest_gateways.sort(key=lambda fs: fs["priority"]) -# -# if not rest_gateways: -# raise HPSError("There is no file storage gateway defined.") -# -# for d in rest_gateways: -# url = d["url"] -# try: -# r = requests.get(url, verify=False, timeout=2) -# is_ansft = r.json()["ansft"] -# except Exception as ex: -# log.debug(ex) -# continue -# if r.status_code == 200 and is_ansft: -# return url -# -# raise HPSError( -# f"All defined file storage gateways are unavailable" -# f" ({', '.join([d['url'] for d in rest_gateways])})." -# ) diff --git a/src/ansys/hps/client/jms/api/project_api.py b/src/ansys/hps/client/jms/api/project_api.py index 2f66837cb..b70261cd1 100644 --- a/src/ansys/hps/client/jms/api/project_api.py +++ b/src/ansys/hps/client/jms/api/project_api.py @@ -19,12 +19,13 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +import tempfile + """Module exposing the project endpoints of the JMS.""" import io import json import logging import os -import shutil from typing import Callable, List, Type, Union import warnings @@ -678,8 +679,10 @@ def _download_files(project_api: ProjectApi, files: List[File]): """ + temp_dir = tempfile.TemporaryDirectory() + project_api.client._start_dt_worker() - out_path = os.path.join(os.path.dirname(__file__), "downloads") + out_path = os.path.join(temp_dir.name, "downloads") base_dir = project_api.project_id srcs = [] @@ -709,10 +712,6 @@ def _download_files(project_api: ProjectApi, files: List[File]): log.error(f"Download of files failed") raise HPSError(f"Download of files failed") - # Delete temporary folder - if os.path.exists(out_path): - shutil.rmtree(out_path) - def get_files(project_api: ProjectApi, as_objects=True, content=False, **query_params): """Get files for the project API.""" @@ -733,20 +732,20 @@ def _upload_files(project_api: ProjectApi, files): project_api.client._start_dt_worker() srcs = [] dsts = [] - tempFiles = [] base_dir = project_api.project_id filePath = "" + temp_dir = tempfile.TemporaryDirectory() + for f in files: if getattr(f, "src", None) is None: continue filePath = f.src if isinstance(f.src, io.IOBase): - tempFiles.append(f.storage_id) mode = "wb" if isinstance(f.src, io.BytesIO) else "w" - with open(f.storage_id, mode) as out: + with open(os.path.join(temp_dir.name, f.storage_id), mode) as out: out.write(f.src.getvalue()) - filePath = f.storage_id + filePath = os.path.join(temp_dir.name, f.storage_id) srcs.append(StoragePath(path=filePath, remote="local")) dsts.append(StoragePath(path=f"{base_dir}/{os.path.basename(f.storage_id)}")) if len(srcs) > 0: @@ -762,11 +761,6 @@ def _upload_files(project_api: ProjectApi, files): log.error(f"Upload of files failed") raise HPSError(f"Upload of files failed") - if len(tempFiles) > 0: - for file in tempFiles: - if os.path.exists(file): - os.remove(file) - else: log.info("No files to upload") @@ -847,6 +841,10 @@ def _download_file( log.info(f"Downloading file {file.id}") src = 
StoragePath(path=f"{base_dir}/{os.path.basename(file.storage_id)}") dst = StoragePath(path=download_path, remote="local") + + if progress_handler is not None: + progress_handler(0) + op = project_api.client.dt_api.copy([SrcDst(src=src, dst=dst)]) op = project_api.client.dt_api.wait_for([op.id]) @@ -856,6 +854,9 @@ def _download_file( log.error(f"Download of file {file.evaluation_path} with id {file.id} failed") return None + if progress_handler is not None: + progress_handler(file.size) + return download_path From dab97d99687bc22eae9d1bcb167fc69463ecfa95 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Wed, 4 Sep 2024 18:27:10 +0530 Subject: [PATCH 48/60] updated profile --- .github/actions/hps_services/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 319e11881..928f6211a 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -96,7 +96,7 @@ runs: tar -xvzf docker-compose-internal.tar.gz cd docker-compose FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull - FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer up -d + FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer-minio up -d working-directory: ./docker-compose-artifact - name: Wait for services From 3357d75b29a86848af58703421ce62b40b75d0d6 Mon Sep 17 00:00:00 2001 From: Raja Pise Date: Thu, 5 Sep 2024 09:25:55 +0530 Subject: [PATCH 49/60] Updated github actions --- .github/actions/hps_services/action.yml | 27 +++++++++---------------- .github/workflows/ci_cd.yml | 1 - .github/workflows/tests.yml | 5 +++++ 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 928f6211a..997e17929 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -43,25 +43,18 @@ runs: version: tags/${{ inputs.version }} token: ${{ inputs.token }} -# - if: ${{ inputs.version == 'latest-dev' }} -# uses: dsaltares/fetch-gh-release-asset@master -# name: Download Internal Services Artifact -# with: -# file: 'docker-compose-internal.tar.gz' -# target: 'docker-compose-artifact/docker-compose-internal.tar.gz' -# repo: ansys-internal/rep-deployments@mpawlik/dt_integration -# version: tags/latest-dev -# token: ${{ inputs.token }} - - uses: dawidd6/action-download-artifact@v3 + - if: ${{ inputs.version == 'latest-dev' }} + uses: dawidd6/action-download-artifact@v3 + name: Download Internal Services Artifact with: workflow: main.yaml - name: docker-compose-internal - branch: mpawlik/dt_integration - repo: ansys-internal/rep-deployments - workflow_conclusion: success - search_artifacts: false - path: ./docker-compose-artifact - github_token: ${{ inputs.token }} + name: docker-compose-internal + branch: ${{ inputs.feature }} + repo: ansys-internal/rep-deployments + workflow_conclusion: success + search_artifacts: false + path: ./docker-compose-artifact + github_token: ${{ inputs.token }} - uses: KengoTODA/actions-setup-docker-compose@main env: diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index e80c79713..acd4f2366 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -138,7 +138,6 @@ jobs: library-name: ${{ env.PACKAGE_NAME }} 
operating-system: ${{ matrix.os }} python-version: ${{ matrix.python-version }} - check-licenses: false package: name: Package library diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7abb27728..0668c828d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -19,6 +19,10 @@ on: description: HPS version to test against type: string default: 'latest-dev' + hps-feature: + description: HPS Feature to test against + type: string + default: 'mpawlik/dt_integration' jobs: @@ -51,6 +55,7 @@ jobs: ghcr-username: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} ghcr-token: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} version: ${{ inputs.hps-version }} + feature: ${{ inputs.hps-feature }} - name: Test with tox run: tox -e ${{ inputs.toxenv }}-coverage From ae3deb267761b47becab7683dbcad726c8bd935e Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:31:45 +0530 Subject: [PATCH 50/60] Update tests.yml --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0668c828d..c795d06f6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -55,7 +55,7 @@ jobs: ghcr-username: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} ghcr-token: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} version: ${{ inputs.hps-version }} - feature: ${{ inputs.hps-feature }} + feature: ${{ inputs.hps-feature }} - name: Test with tox run: tox -e ${{ inputs.toxenv }}-coverage From f92cf187b4e32c7e050fb391e03592f622b949d9 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:33:19 +0530 Subject: [PATCH 51/60] Update tests.yml --- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c795d06f6..7451809ec 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -55,8 +55,8 @@ jobs: ghcr-username: ${{ secrets.PYANSYS_CI_BOT_USERNAME }} ghcr-token: ${{ secrets.PYANSYS_CI_BOT_PACKAGE_TOKEN }} version: ${{ inputs.hps-version }} - feature: ${{ inputs.hps-feature }} - + feature: ${{ inputs.hps-feature }} + - name: Test with tox run: tox -e ${{ inputs.toxenv }}-coverage env: From adb6a045713b12fd00bfe4254c3cf9c32bd8a24c Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:34:32 +0530 Subject: [PATCH 52/60] Update action.yml --- .github/actions/hps_services/action.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index 997e17929..f6abe41ae 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -48,13 +48,13 @@ runs: name: Download Internal Services Artifact with: workflow: main.yaml - name: docker-compose-internal - branch: ${{ inputs.feature }} - repo: ansys-internal/rep-deployments - workflow_conclusion: success - search_artifacts: false - path: ./docker-compose-artifact - github_token: ${{ inputs.token }} + name: docker-compose-internal + branch: ${{ inputs.feature }} + repo: ansys-internal/rep-deployments + workflow_conclusion: success + search_artifacts: false + path: ./docker-compose-artifact + github_token: ${{ inputs.token }} - uses: KengoTODA/actions-setup-docker-compose@main env: @@ -105,4 +105,4 @@ runs: run: | echo 
"url=https://localhost:8443/hps" >> $GITHUB_OUTPUT echo "path=$(pwd)" >> $GITHUB_OUTPUT - working-directory: ./docker-compose-artifact/docker-compose \ No newline at end of file + working-directory: ./docker-compose-artifact/docker-compose From 7486c70855814ae25c6267251effd74ebcbe12d2 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:44:06 +0530 Subject: [PATCH 53/60] Update ci_cd.yml --- .github/workflows/ci_cd.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index acd4f2366..761d36ead 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -10,6 +10,9 @@ on: - 'v1.0.2' - 'v1.0.3' - 'latest-dev' + hps-feature: + description: HPS feature to test against + default: 'string' pull_request: push: @@ -57,6 +60,7 @@ jobs: toxenv: ${{ matrix.cfg.toxenv }} runner: ${{ matrix.os }} hps-version: ${{ inputs.hps-version || 'latest-dev' }} + hps-feature: ${{ inputs.hps-feature }} docs: name: Documentation From a3ae704b2031e4487c474d04d8416cfca1a823bd Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:45:20 +0530 Subject: [PATCH 54/60] Update ci_cd.yml --- .github/workflows/ci_cd.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 761d36ead..6b3b3a5e5 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -12,7 +12,8 @@ on: - 'latest-dev' hps-feature: description: HPS feature to test against - default: 'string' + type: 'string' + default: 'mpawlik/dt_integration' pull_request: push: From 4274eca8748c16fcf68cbc229274c213c1ee5903 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:51:27 +0530 Subject: [PATCH 55/60] Update action.yml --- .github/actions/hps_services/action.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index f6abe41ae..ab66e5296 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -8,6 +8,10 @@ inputs: description: 'HPS version' required: true + feature: + description: 'HPS feature' + required: true + token: description: 'Token' required: true From 7b1b77afb55a29465fd884b3917b7dd68b4e9f86 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 10:34:56 +0530 Subject: [PATCH 56/60] Update ci_cd.yml --- .github/workflows/ci_cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 6b3b3a5e5..784467f84 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -20,7 +20,7 @@ on: tags: - "*" branches: - - main + - rajapise/dts-integration env: MAIN_PYTHON_VERSION: '3.10' From fa12c618a629c3ac9654b000cc4b6239ee425015 Mon Sep 17 00:00:00 2001 From: RajaPiseD <115138235+RajaPiseD@users.noreply.github.com> Date: Thu, 5 Sep 2024 10:38:29 +0530 Subject: [PATCH 57/60] Update ci_cd.yml --- .github/workflows/ci_cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 784467f84..6b3b3a5e5 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -20,7 +20,7 @@ on: tags: - "*" branches: - - rajapise/dts-integration + - main env: 
MAIN_PYTHON_VERSION: '3.10' From b209cc579c4053a279931feeafb901929cadc737 Mon Sep 17 00:00:00 2001 From: Federico Negri Date: Thu, 5 Sep 2024 08:54:49 +0200 Subject: [PATCH 58/60] adjust hps-feature defaults --- .github/actions/hps_services/action.yml | 6 +++--- .github/workflows/ci_cd.yml | 4 ++-- .github/workflows/tests.yml | 4 ++-- src/ansys/hps/client/__version__.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index ab66e5296..af3cfbf8b 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -9,8 +9,8 @@ inputs: required: true feature: - description: 'HPS feature' - required: true + description: 'HPS feature (only for latest-dev version)' + required: false token: description: 'Token' @@ -53,7 +53,7 @@ runs: with: workflow: main.yaml name: docker-compose-internal - branch: ${{ inputs.feature }} + branch: ${{ inputs.feature || 'main'}} repo: ansys-internal/rep-deployments workflow_conclusion: success search_artifacts: false diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 6b3b3a5e5..0873a65a0 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -13,7 +13,7 @@ on: hps-feature: description: HPS feature to test against type: 'string' - default: 'mpawlik/dt_integration' + default: 'main' pull_request: push: @@ -61,7 +61,7 @@ jobs: toxenv: ${{ matrix.cfg.toxenv }} runner: ${{ matrix.os }} hps-version: ${{ inputs.hps-version || 'latest-dev' }} - hps-feature: ${{ inputs.hps-feature }} + hps-feature: ${{ inputs.hps-feature || 'mpawlik/dt_integration' }} # revert to 'main' before merging docs: name: Documentation diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7451809ec..62c32a630 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -20,9 +20,9 @@ on: type: string default: 'latest-dev' hps-feature: - description: HPS Feature to test against + description: HPS Feature to test against (only for latest-dev version) type: string - default: 'mpawlik/dt_integration' + default: 'main' jobs: diff --git a/src/ansys/hps/client/__version__.py b/src/ansys/hps/client/__version__.py index 45d3f38be..130cba281 100644 --- a/src/ansys/hps/client/__version__.py +++ b/src/ansys/hps/client/__version__.py @@ -31,4 +31,4 @@ # this is only a convenience to default the version # of Ansys simulation applications in PyHPS examples -__ansys_apps_version__ = "2024 R2" +__ansys_apps_version__ = "2024 R1" From fcb023a457693bdeb6420b93565ef077f395583e Mon Sep 17 00:00:00 2001 From: Federico Negri Date: Thu, 5 Sep 2024 10:19:48 +0200 Subject: [PATCH 59/60] prep for merge --- .github/workflows/ci_cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 0873a65a0..a46b73c66 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -61,7 +61,7 @@ jobs: toxenv: ${{ matrix.cfg.toxenv }} runner: ${{ matrix.os }} hps-version: ${{ inputs.hps-version || 'latest-dev' }} - hps-feature: ${{ inputs.hps-feature || 'mpawlik/dt_integration' }} # revert to 'main' before merging + hps-feature: ${{ inputs.hps-feature || 'main' }} docs: name: Documentation From eb3dd4b237e3cd9ddb540ac9c9e16fac195c193f Mon Sep 17 00:00:00 2001 From: Federico Negri Date: Thu, 5 Sep 2024 10:27:51 +0200 Subject: [PATCH 60/60] remove compose tags --- .github/actions/hps_services/action.yml | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/.github/actions/hps_services/action.yml b/.github/actions/hps_services/action.yml index af3cfbf8b..788dc6d68 100644 --- a/.github/actions/hps_services/action.yml +++ b/.github/actions/hps_services/action.yml @@ -92,8 +92,8 @@ runs: ls -la tar -xvzf docker-compose-internal.tar.gz cd docker-compose - FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose pull - FSGATEWAY_TAG=latest-dev JMS_TAG=mgunupa-integration DTS_TAG=latest-dev docker-compose --profile=stable --profile=data-transfer-minio up -d + docker-compose pull + docker-compose up -d working-directory: ./docker-compose-artifact - name: Wait for services
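
Editor's note: the patches above repeatedly use one copy-and-wait pattern against the data transfer API — build StoragePath endpoints, submit a copy of SrcDst pairs, wait on the operation id, then check OperationState. The snippet below is a minimal illustrative sketch of that pattern only, not part of the patch series; the helper name copy_and_wait is invented here, and it assumes a DataTransferApi instance (dt_api) that has already been started the way Client._start_dt_worker() does in these diffs.

    # Minimal sketch (illustrative, not from the patches): the copy-and-wait
    # pattern used throughout the diffs above. Assumes `dt_api` is an already
    # started ansys.hps.data_transfer.client.DataTransferApi instance.
    from ansys.hps.data_transfer.client.models.msg import SrcDst, StoragePath
    from ansys.hps.data_transfer.client.models.ops import OperationState

    def copy_and_wait(dt_api, local_path: str, remote_path: str) -> None:
        """Copy one local file to remote storage and fail loudly on error."""
        src = StoragePath(path=local_path, remote="local")
        dst = StoragePath(path=remote_path)
        op = dt_api.copy([SrcDst(src=src, dst=dst)])  # submit the transfer
        op = dt_api.wait_for([op.id])                 # block until it finishes
        if op[0].state != OperationState.Succeeded:
            raise RuntimeError(f"Copy of {local_path} to {remote_path} failed")

The same sequence appears in _upload_files, _download_files, _upload_archive, and _download_archive in the patches above, differing only in which side is marked remote="local" and in how failures are reported.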