From 5b59ff62a57f752be6d50291e2f68bcc11d87918 Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Fri, 25 Jul 2025 13:35:05 -0400 Subject: [PATCH 01/10] Added import thermal signal API --- src/ansys/sherlock/core/lifecycle.py | 56 ++++++ .../sherlock/core/types/lifecycle_types.py | 110 ++++++++++++ tests/test_lifecycle.py | 161 +++++++++++++++++- 3 files changed, 324 insertions(+), 3 deletions(-) create mode 100644 src/ansys/sherlock/core/types/lifecycle_types.py diff --git a/src/ansys/sherlock/core/lifecycle.py b/src/ansys/sherlock/core/lifecycle.py index 7add08f91..0e351a569 100644 --- a/src/ansys/sherlock/core/lifecycle.py +++ b/src/ansys/sherlock/core/lifecycle.py @@ -2,9 +2,11 @@ """Module containing all life cycle management capabilities.""" try: + import SherlockCommonService_pb2 import SherlockLifeCycleService_pb2 import SherlockLifeCycleService_pb2_grpc except ModuleNotFoundError: + from ansys.api.sherlock.v0 import SherlockCommonService_pb2 from ansys.api.sherlock.v0 import SherlockLifeCycleService_pb2 from ansys.api.sherlock.v0 import SherlockLifeCycleService_pb2_grpc @@ -35,6 +37,7 @@ SherlockNoGrpcConnectionException, ) from ansys.sherlock.core.grpc_stub import GrpcStub +from ansys.sherlock.core.types.lifecycle_types import ImportThermalSignalRequest from ansys.sherlock.core.utils.version_check import require_version @@ -2107,3 +2110,56 @@ def load_shock_profile_pulses( for error in e.str_itr(): LOG.error(error) raise e + + @require_version(261) + def import_thermal_signal( + self, request: ImportThermalSignalRequest + ) -> SherlockCommonService_pb2.ReturnCode: + """Import a thermal signal to a life cycle phase. + + Available Since: 2026R1 + + Parameters + ---------- + request: ImportThermalSignalRequest + Request object containing the information needed to import a thermal signal. + + Returns + ------- + SherlockCommonService_pb2.ReturnCode + Status code of the response. 0 for success. + + Examples + -------- + >>> from ansys.sherlock.core.types.lifecycle_types import ImportThermalSignalRequest + >>> from ansys.sherlock.core.types.lifecycle_types import ThermalSignalFileProperties + >>> from ansys.sherlock.core.launcher import launch_sherlock + >>> sherlock = launch_sherlock() + >>> response = sherlock.lifecycle.import_thermal_signal( + >>> ImportThermalSignalRequest( + >>> file_name="/path/to/thermal_signal_file.csv", + >>> project="TestProject", + >>> thermal_signal_file_properties=ThermalSignalFileProperties( + >>> header_row_count=0, + >>> numeric_format="English", + >>> column_delimiter=",", + >>> time_column="Time", + >>> time_units="sec", + >>> temperature_column="Temperature", + >>> temperature_units="C" + >>> ), + >>> phase_name=phaseName, + >>> time_removal= False, + >>> load_range_percentage=0.25, + >>> number_of_bins=0, + >>> filtering_limit=0.0, + >>> generated_cycles_label="Second Generated Cycles from Python", + >>> ) + >>> ) + """ + import_thermal_signal_request = request._convert_to_grpc() + + if not self._is_connection_up(): + raise SherlockNoGrpcConnectionException() + + return self.stub.importThermalSignal(import_thermal_signal_request) diff --git a/src/ansys/sherlock/core/types/lifecycle_types.py b/src/ansys/sherlock/core/types/lifecycle_types.py new file mode 100644 index 000000000..fd65f4f1d --- /dev/null +++ b/src/ansys/sherlock/core/types/lifecycle_types.py @@ -0,0 +1,110 @@ +# Copyright (C) 2021 - 2025 ANSYS, Inc. and/or its affiliates. 
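+# The pydantic models defined here validate request fields client-side and provide
+# _convert_to_grpc() helpers that build the matching gRPC request messages.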
+ +"""Module containing types for the Lifecycle Service.""" + +from pydantic import BaseModel, ValidationInfo, field_validator + +from ansys.sherlock.core.types.common_types import basic_str_validator + +try: + import SherlockLifeCycleService_pb2 +except ModuleNotFoundError: + from ansys.api.sherlock.v0 import SherlockLifeCycleService_pb2 + + +class ThermalSignalFileProperties(BaseModel): + """Properties of a thermal signal file.""" + + header_row_count: int + """Number of rows before the column header in the file.""" + numeric_format: str + """Numeric format for the values.""" + column_delimiter: str + """Delimiter used to separate columns in the file.""" + time_column: str + """Name of the column containing time values.""" + time_units: str + """Units of the time values.""" + temperature_column: str + """Name of the column containing temperature values.""" + temperature_units: str + """Units of the temperature values.""" + + def _convert_to_grpc( + self, + ) -> SherlockLifeCycleService_pb2.ImportThermalSignalRequest.ThermalSignalFileProperties: + """Convert to gRPC ThermalSignalFileProperties.""" + return SherlockLifeCycleService_pb2.ImportThermalSignalRequest.ThermalSignalFileProperties( + headerRowCount=self.header_row_count, + numericFormat=self.numeric_format, + columnDelimiter=self.column_delimiter, + timeColumn=self.time_column, + timeUnits=self.time_units, + temperatureColumn=self.temperature_column, + temperatureUnits=self.temperature_units, + ) + + @field_validator("header_row_count") + @classmethod + def non_negative_int_validation(cls, value: int, info: ValidationInfo): + """Validate integer fields listed contain non-negative values.""" + if value < 0: + raise ValueError(f"{info.field_name} must be greater than or equal to 0.") + return value + + @field_validator("time_column", "time_units", "temperature_column", "temperature_units") + @classmethod + def str_validation(cls, value: str, info: ValidationInfo): + """Validate string fields listed.""" + return basic_str_validator(value, info.field_name) + + +class ImportThermalSignalRequest(BaseModel): + """Request to import a thermal signal file.""" + + file_name: str + """The full path to the CSV thermal signal file to be imported.""" + project: str + """Sherlock project name in which the thermal signal is imported.""" + phase_name: str + """Name of the phase in which the thermal signal is imported.""" + thermal_signal_file_properties: ThermalSignalFileProperties + """Properties of the thermal signal file.""" + time_removal: bool + """Option to indicate that time results with shorter half-cycle durations are removed.""" + load_range_percentage: float + """Defines the fraction of the range near peaks and valleys considered as a dwell region""" + number_of_bins: int + """Number of bins for binning cycles, 0 for no binning""" + filtering_limit: float + """Minimum cycle range to include in results, 0 for not filtering""" + generated_cycles_label: str + """Label used to define the name of all generated thermal events.""" + + @field_validator("file_name", "project", "phase_name", "generated_cycles_label") + @classmethod + def str_validation(cls, value: str, info: ValidationInfo): + """Validate string fields listed.""" + return basic_str_validator(value, info.field_name) + + @field_validator("number_of_bins") + @classmethod + def non_negative_int_validation(cls, value: int, info: ValidationInfo): + """Validate integer fields listed contain non-negative values.""" + if value < 0: + raise ValueError(f"{info.field_name} must be greater 
than or equal to 0.") + return value + + def _convert_to_grpc(self) -> SherlockLifeCycleService_pb2.ImportThermalSignalRequest: + """Convert to gRPC ImportThermalSignalRequest.""" + return SherlockLifeCycleService_pb2.ImportThermalSignalRequest( + thermalSignalFile=self.file_name, + project=self.project, + phaseName=self.phase_name, + fileProperties=self.thermal_signal_file_properties._convert_to_grpc(), + timeRemoval=self.time_removal, + loadRangePercentage=self.load_range_percentage, + numberOfBins=self.number_of_bins, + filteringLimit=self.filtering_limit, + generatedCyclesLabel=self.generated_cycles_label, + ) diff --git a/tests/test_lifecycle.py b/tests/test_lifecycle.py index de714dbf3..fd8493edf 100644 --- a/tests/test_lifecycle.py +++ b/tests/test_lifecycle.py @@ -1,9 +1,9 @@ # Copyright (C) 2021 - 2025 ANSYS, Inc. and/or its affiliates. -from typing import cast import uuid import grpc +import pydantic import pytest from ansys.sherlock.core.errors import ( @@ -23,6 +23,10 @@ SherlockLoadThermalProfileError, ) from ansys.sherlock.core.lifecycle import Lifecycle +from ansys.sherlock.core.types.lifecycle_types import ( + ImportThermalSignalRequest, + ThermalSignalFileProperties, +) from ansys.sherlock.core.utils.version_check import SKIP_VERSION_CHECK @@ -46,6 +50,7 @@ def test_all(): helper_test_load_harmonic_profile(lifecycle) helper_test_load_shock_profile_dataset(lifecycle) helper_test_load_shock_profile_pulses(lifecycle) + helper_test_import_thermal_signal(lifecycle) def helper_test_create_life_phase(lifecycle: Lifecycle): @@ -2047,8 +2052,158 @@ def helper_test_load_shock_profile_pulses(lifecycle: Lifecycle): pytest.fail("No exception raised when using an invalid parameter") except Exception as e: assert type(e) == SherlockLoadShockProfilePulsesError - load_error = cast(SherlockLoadShockProfilePulsesError, e) - assert len(load_error.error_array) == 1 + + +def helper_test_import_thermal_signal(lifecycle: Lifecycle): + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=0, + filtering_limit=0.0, + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using a missing file_name parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, file_name is invalid because it is None or empty." 
+ ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=0, + filtering_limit=0.0, + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using a missing project parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, project is invalid because it is None or empty." + ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=0, + filtering_limit=0.0, + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using a missing phase_name parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, phase_name is invalid because it is None or empty." + ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=-1, + filtering_limit=0.0, + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using a missing generated_cycles_label parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, number_of_bins must be greater than or equal to 0." + ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=0, + filtering_limit=0.0, + generated_cycles_label="", + ) + ) + pytest.fail("No exception raised when using a missing generated_cycles_label parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, generated_cycles_label is invalid because it is None or empty." 
+ ) if __name__ == "__main__": From f83bc2c86a3e49cd473301e8f4c2dfaa6193138b Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Fri, 25 Jul 2025 13:55:46 -0400 Subject: [PATCH 02/10] Added import thermal signal API --- doc/source/api/index.rst | 2 ++ doc/source/api/lifecycle_types.rst | 13 +++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 doc/source/api/lifecycle_types.rst diff --git a/doc/source/api/index.rst b/doc/source/api/index.rst index 4a543c9bc..1903505af 100644 --- a/doc/source/api/index.rst +++ b/doc/source/api/index.rst @@ -18,6 +18,7 @@ Use the search feature or click links to view API documentation. layer layer_types lifecycle + lifecycle_types model parts parts_types @@ -35,6 +36,7 @@ Use the search feature or click links to view API documentation. ansys.sherlock.core.layer ansys.sherlock.core.types.layer_types ansys.sherlock.core.lifecycle + ansys.sherlock.core.types.lifecycle_types ansys.sherlock.core.model ansys.sherlock.core.parts ansys.sherlock.core.types.parts_types diff --git a/doc/source/api/lifecycle_types.rst b/doc/source/api/lifecycle_types.rst new file mode 100644 index 000000000..f8861ebc4 --- /dev/null +++ b/doc/source/api/lifecycle_types.rst @@ -0,0 +1,13 @@ +.. _ref_common_types: + +LifeCycle Types +=============== +.. automodule:: ansys.sherlock.core.types.lifecycle_types +.. currentmodule:: ansys.sherlock.core.types.lifecycle_types + +Constants +--------- +.. autoclass:: ThermalSignalFileProperties + :members: +.. autoclass:: ImportThermalSignalRequest + :members: From 11e6085fd6fdc4005cac47c2462f9769c682853d Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Fri, 1 Aug 2025 09:42:51 -0400 Subject: [PATCH 03/10] Added time filtering to importThermalSignal API. --- src/ansys/sherlock/core/lifecycle.py | 4 +- .../sherlock/core/types/lifecycle_types.py | 20 ++++--- tests/test_lifecycle.py | 52 +++++++++++++++++-- 3 files changed, 64 insertions(+), 12 deletions(-) diff --git a/src/ansys/sherlock/core/lifecycle.py b/src/ansys/sherlock/core/lifecycle.py index 0e351a569..290891ea2 100644 --- a/src/ansys/sherlock/core/lifecycle.py +++ b/src/ansys/sherlock/core/lifecycle.py @@ -2152,7 +2152,9 @@ def import_thermal_signal( >>> time_removal= False, >>> load_range_percentage=0.25, >>> number_of_bins=0, - >>> filtering_limit=0.0, + >>> temperature_range_filtering_limit=0.0, + >>> time_filtering_limit=72.0, + >>> time_filtering_limit_units="hr", >>> generated_cycles_label="Second Generated Cycles from Python", >>> ) >>> ) diff --git a/src/ansys/sherlock/core/types/lifecycle_types.py b/src/ansys/sherlock/core/types/lifecycle_types.py index fd65f4f1d..66c9d569d 100644 --- a/src/ansys/sherlock/core/types/lifecycle_types.py +++ b/src/ansys/sherlock/core/types/lifecycle_types.py @@ -73,15 +73,21 @@ class ImportThermalSignalRequest(BaseModel): time_removal: bool """Option to indicate that time results with shorter half-cycle durations are removed.""" load_range_percentage: float - """Defines the fraction of the range near peaks and valleys considered as a dwell region""" + """Defines the fraction of the range near peaks and valleys considered as a dwell region.""" number_of_bins: int - """Number of bins for binning cycles, 0 for no binning""" - filtering_limit: float - """Minimum cycle range to include in results, 0 for not filtering""" + """Number of bins for binning cycles, 0 for no binning.""" + temperature_range_filtering_limit: float + """Minimum cycle range to include in results, 0 for not filtering.""" + time_filtering_limit: float + """Maximum cycle time 
to include in results, default is 72 hours.""" + time_filtering_limit_units: str + """Units of the time filtering limit.""" generated_cycles_label: str """Label used to define the name of all generated thermal events.""" - @field_validator("file_name", "project", "phase_name", "generated_cycles_label") + @field_validator( + "file_name", "project", "phase_name", "time_filtering_limit_units", "generated_cycles_label" + ) @classmethod def str_validation(cls, value: str, info: ValidationInfo): """Validate string fields listed.""" @@ -105,6 +111,8 @@ def _convert_to_grpc(self) -> SherlockLifeCycleService_pb2.ImportThermalSignalRe timeRemoval=self.time_removal, loadRangePercentage=self.load_range_percentage, numberOfBins=self.number_of_bins, - filteringLimit=self.filtering_limit, + temperatureRangeFilteringLimit=self.temperature_range_filtering_limit, + timeFilteringLimit=self.time_filtering_limit, + timeFilteringLimitUnits=self.time_filtering_limit_units, generatedCyclesLabel=self.generated_cycles_label, ) diff --git a/tests/test_lifecycle.py b/tests/test_lifecycle.py index fd8493edf..83d74802a 100644 --- a/tests/test_lifecycle.py +++ b/tests/test_lifecycle.py @@ -2073,7 +2073,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): time_removal=False, load_range_percentage=0.25, number_of_bins=0, - filtering_limit=0.0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", generated_cycles_label="Generated Cycles from pySherlock", ) ) @@ -2103,7 +2105,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): time_removal=False, load_range_percentage=0.25, number_of_bins=0, - filtering_limit=0.0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", generated_cycles_label="Generated Cycles from pySherlock", ) ) @@ -2133,7 +2137,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): time_removal=False, load_range_percentage=0.25, number_of_bins=0, - filtering_limit=0.0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", generated_cycles_label="Generated Cycles from pySherlock", ) ) @@ -2163,7 +2169,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): time_removal=False, load_range_percentage=0.25, number_of_bins=-1, - filtering_limit=0.0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", generated_cycles_label="Generated Cycles from pySherlock", ) ) @@ -2193,7 +2201,41 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): time_removal=False, load_range_percentage=0.25, number_of_bins=0, - filtering_limit=0.0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="", + generated_cycles_label="", + ) + ) + pytest.fail("No exception raised when using a missing time_filtering_limit parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, time_filtering_limit_units is invalid because it is None or empty." 
+ ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_bins=0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", generated_cycles_label="", ) ) From 5fb5ff1c6451479a9eced7bb0034f808b7c00dda Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Tue, 12 Aug 2025 15:41:11 -0400 Subject: [PATCH 04/10] Added time filtering to importThermalSignal API. --- tests/test_lifecycle.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_lifecycle.py b/tests/test_lifecycle.py index 83d74802a..058fe784f 100644 --- a/tests/test_lifecycle.py +++ b/tests/test_lifecycle.py @@ -2204,10 +2204,10 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="", - generated_cycles_label="", + generated_cycles_label="Generated Cycles from pySherlock", ) ) - pytest.fail("No exception raised when using a missing time_filtering_limit parameter") + pytest.fail("No exception raised when using a missing time_filtering_limit_units parameter") except Exception as e: assert isinstance(e, pydantic.ValidationError) assert ( From fc9b6e885bf4bc504d315645c9c168fb50226755 Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Wed, 3 Sep 2025 15:13:19 -0400 Subject: [PATCH 05/10] Updated rainflow cycle binning to use separate binning parameters for range, mean, and dwell. 
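This replaces the single number_of_bins field on ImportThermalSignalRequest with
separate range, mean, and dwell bin counts. A minimal usage sketch under the updated
field names follows; the file path, project, phase, label, and bin counts are
illustrative values only, not part of this change:

    from ansys.sherlock.core.launcher import launch_sherlock
    from ansys.sherlock.core.types.lifecycle_types import (
        ImportThermalSignalRequest,
        ThermalSignalFileProperties,
    )

    sherlock = launch_sherlock()
    return_code = sherlock.lifecycle.import_thermal_signal(
        ImportThermalSignalRequest(
            file_name="/path/to/thermal_signal.csv",
            project="Tutorial Project",
            phase_name="Environmental",
            thermal_signal_file_properties=ThermalSignalFileProperties(
                header_row_count=0,
                numeric_format="English",
                column_delimiter=",",
                time_column="Time",
                time_units="sec",
                temperature_column="Temperature",
                temperature_units="C",
            ),
            time_removal=False,
            load_range_percentage=0.25,
            number_of_range_bins=10,  # 0 disables range binning
            number_of_mean_bins=5,  # 0 disables mean binning
            number_of_dwell_bins=5,  # 0 disables dwell binning
            temperature_range_filtering_limit=0.0,
            time_filtering_limit=72.0,
            time_filtering_limit_units="hr",
            generated_cycles_label="Generated Cycles from pySherlock",
        )
    )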
--- src/ansys/sherlock/core/lifecycle.py | 4 +- .../sherlock/core/types/lifecycle_types.py | 14 ++- tests/test_lifecycle.py | 96 +++++++++++++++++-- 3 files changed, 101 insertions(+), 13 deletions(-) diff --git a/src/ansys/sherlock/core/lifecycle.py b/src/ansys/sherlock/core/lifecycle.py index 290891ea2..ac985bb09 100644 --- a/src/ansys/sherlock/core/lifecycle.py +++ b/src/ansys/sherlock/core/lifecycle.py @@ -2151,7 +2151,9 @@ def import_thermal_signal( >>> phase_name=phaseName, >>> time_removal= False, >>> load_range_percentage=0.25, - >>> number_of_bins=0, + >>> number_of_range_bins=0, + >>> number_of_mean_bins=0, + >>> number_of_dwell_bins=0, >>> temperature_range_filtering_limit=0.0, >>> time_filtering_limit=72.0, >>> time_filtering_limit_units="hr", diff --git a/src/ansys/sherlock/core/types/lifecycle_types.py b/src/ansys/sherlock/core/types/lifecycle_types.py index 66c9d569d..8be3ff486 100644 --- a/src/ansys/sherlock/core/types/lifecycle_types.py +++ b/src/ansys/sherlock/core/types/lifecycle_types.py @@ -74,8 +74,12 @@ class ImportThermalSignalRequest(BaseModel): """Option to indicate that time results with shorter half-cycle durations are removed.""" load_range_percentage: float """Defines the fraction of the range near peaks and valleys considered as a dwell region.""" - number_of_bins: int - """Number of bins for binning cycles, 0 for no binning.""" + number_of_range_bins: int + """Number of range bins for binning cycles, 0 for no range binning.""" + number_of_mean_bins: int + """Number of mean bins for binning cycles, 0 for no mean binning.""" + number_of_dwell_bins: int + """Number of dwell bins for binning cycles, 0 for no dwell binning.""" temperature_range_filtering_limit: float """Minimum cycle range to include in results, 0 for not filtering.""" time_filtering_limit: float @@ -93,7 +97,7 @@ def str_validation(cls, value: str, info: ValidationInfo): """Validate string fields listed.""" return basic_str_validator(value, info.field_name) - @field_validator("number_of_bins") + @field_validator("number_of_range_bins", "number_of_mean_bins", "number_of_dwell_bins") @classmethod def non_negative_int_validation(cls, value: int, info: ValidationInfo): """Validate integer fields listed contain non-negative values.""" @@ -110,7 +114,9 @@ def _convert_to_grpc(self) -> SherlockLifeCycleService_pb2.ImportThermalSignalRe fileProperties=self.thermal_signal_file_properties._convert_to_grpc(), timeRemoval=self.time_removal, loadRangePercentage=self.load_range_percentage, - numberOfBins=self.number_of_bins, + numberOfRangeBins=self.number_of_range_bins, + numberOfMeanBins=self.number_of_mean_bins, + numberOfDwellBins=self.number_of_dwell_bins, temperatureRangeFilteringLimit=self.temperature_range_filtering_limit, timeFilteringLimit=self.time_filtering_limit, timeFilteringLimitUnits=self.time_filtering_limit_units, diff --git a/tests/test_lifecycle.py b/tests/test_lifecycle.py index 058fe784f..7fc62228b 100644 --- a/tests/test_lifecycle.py +++ b/tests/test_lifecycle.py @@ -2072,7 +2072,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="Environmental", time_removal=False, load_range_percentage=0.25, - number_of_bins=0, + number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="hr", @@ -2104,7 +2106,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="Environmental", time_removal=False, load_range_percentage=0.25, - number_of_bins=0, 
+ number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="hr", @@ -2136,7 +2140,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="", time_removal=False, load_range_percentage=0.25, - number_of_bins=0, + number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="hr", @@ -2168,19 +2174,89 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="Environmental", time_removal=False, load_range_percentage=0.25, - number_of_bins=-1, + number_of_range_bins=-1, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="hr", generated_cycles_label="Generated Cycles from pySherlock", ) ) - pytest.fail("No exception raised when using a missing generated_cycles_label parameter") + pytest.fail("No exception raised when using invalid number_of_range_bins parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, number_of_range_bins must be greater than or equal to 0." + ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_range_bins=0, + number_of_mean_bins=-1, + number_of_dwell_bins=0, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using invalid number_of_mean_bins parameter") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, number_of_mean_bins must be greater than or equal to 0." + ) + + try: + lifecycle.import_thermal_signal( + ImportThermalSignalRequest( + file_name="C:/Temp/ThermalSignalMissing.csv", + project="Tutorial Project", + thermal_signal_file_properties=ThermalSignalFileProperties( + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="sec", + temperature_column="Temperature", + temperature_units="C", + ), + phase_name="Environmental", + time_removal=False, + load_range_percentage=0.25, + number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=-1, + temperature_range_filtering_limit=0.0, + time_filtering_limit=72.0, + time_filtering_limit_units="hr", + generated_cycles_label="Generated Cycles from pySherlock", + ) + ) + pytest.fail("No exception raised when using invalid number_of_dwell_bins parameter") except Exception as e: assert isinstance(e, pydantic.ValidationError) assert ( str(e.errors()[0]["msg"]) - == "Value error, number_of_bins must be greater than or equal to 0." + == "Value error, number_of_dwell_bins must be greater than or equal to 0." 
) try: @@ -2200,7 +2276,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="Environmental", time_removal=False, load_range_percentage=0.25, - number_of_bins=0, + number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="", @@ -2232,7 +2310,9 @@ def helper_test_import_thermal_signal(lifecycle: Lifecycle): phase_name="Environmental", time_removal=False, load_range_percentage=0.25, - number_of_bins=0, + number_of_range_bins=0, + number_of_mean_bins=0, + number_of_dwell_bins=0, temperature_range_filtering_limit=0.0, time_filtering_limit=72.0, time_filtering_limit_units="hr", From 7a807ee5e4c8747608bd7b7200410a2a8d527fbb Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Thu, 2 Oct 2025 13:55:30 -0400 Subject: [PATCH 06/10] Added pcb_material_elasticity option to export_FEA_model() --- examples/03-exporting/export_fea_model.py | 2 ++ src/ansys/sherlock/core/model.py | 17 +++++++++++--- tests/test_model.py | 27 ++++++++++++++++------- 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/examples/03-exporting/export_fea_model.py b/examples/03-exporting/export_fea_model.py index 29ecfffc2..98a18c820 100644 --- a/examples/03-exporting/export_fea_model.py +++ b/examples/03-exporting/export_fea_model.py @@ -39,6 +39,7 @@ import os +from ansys.api.sherlock.v0.SherlockModelService_pb2 import PcbMaterialElasticity from examples.examples_globals import get_sherlock_tutorial_path, get_temp_dir from ansys.sherlock.core import launcher @@ -115,6 +116,7 @@ clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Orthotropic, ) print(f"FEA model exported successfully to: {fea_export_path}") except SherlockExportFEAModelError as e: diff --git a/src/ansys/sherlock/core/model.py b/src/ansys/sherlock/core/model.py index dcb50b2bc..ed3a0227e 100644 --- a/src/ansys/sherlock/core/model.py +++ b/src/ansys/sherlock/core/model.py @@ -9,11 +9,15 @@ try: import SherlockModelService_pb2 - from SherlockModelService_pb2 import MeshType, TraceOutputType + from SherlockModelService_pb2 import MeshType, PcbMaterialElasticity, TraceOutputType import SherlockModelService_pb2_grpc except ModuleNotFoundError: from ansys.api.sherlock.v0 import SherlockModelService_pb2 - from ansys.api.sherlock.v0.SherlockModelService_pb2 import MeshType, TraceOutputType + from ansys.api.sherlock.v0.SherlockModelService_pb2 import ( + MeshType, + PcbMaterialElasticity, + TraceOutputType, + ) from ansys.api.sherlock.v0 import SherlockModelService_pb2_grpc from ansys.sherlock.core import LOG @@ -652,6 +656,7 @@ def export_FEA_model( clear_FEA_database: bool, use_FEA_model_id: bool, coordinate_units: str, + pcb_material_elasticity: PcbMaterialElasticity = PcbMaterialElasticity.Isotropic, ) -> int: """ Export a FEA model. @@ -711,6 +716,9 @@ def export_FEA_model( Whether to use FEA model ID. coordinate_units: str Units of the model coordinates to use when exporting a model. + pcb_material_elasticity: PcbMaterialElasticity + The type of PCB material elasticity to use when exporting a model. The default value is + ``PcbMaterialElasticity.Isotropic``. 
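+            ``PcbMaterialElasticity.Orthotropic`` is also available (see the
+            ``examples/03-exporting/export_fea_model.py`` example).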
Returns @@ -720,6 +728,7 @@ def export_FEA_model( Examples -------- + >>> from ansys.api.sherlock.v0.SherlockModelService_pb2 import PcbMaterialElasticity >>> from ansys.sherlock.core.launcher import launch_sherlock >>> from ansys.sherlock.core.types.common_types import ( Measurement, @@ -751,7 +760,8 @@ def export_FEA_model( display_model=True, clear_FEA_database=True, use_FEA_model_id=True, - coordinate_units="mm" + coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic ) """ try: @@ -834,6 +844,7 @@ def export_FEA_model( export_request.clearFEADatabase = clear_FEA_database export_request.useFEAModelID = use_FEA_model_id export_request.coordinateUnits = coordinate_units + export_request.pcbMaterialElasticity = pcb_material_elasticity return_code = self.stub.exportFEAModel(export_request) if return_code.value != 0: diff --git a/tests/test_model.py b/tests/test_model.py index 27993cd3e..b7d8134e0 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -4,6 +4,7 @@ import platform import unittest +from ansys.api.sherlock.v0.SherlockModelService_pb2 import PcbMaterialElasticity import grpc import pytest @@ -315,6 +316,7 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) pytest.fail("No exception raised for invalid project name") except SherlockExportFEAModelError as e: @@ -348,8 +350,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid CCA name") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: CCA name is invalid." @@ -381,8 +384,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid file path") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: Export file path is invalid." @@ -414,8 +418,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid file directory") except SherlockExportFEAModelError as e: assert str(e) == f'Export FEA model error: Export file directory "test" does not exist.' @@ -447,8 +452,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid minimum hole diameter") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: Minimum hole diameter is invalid." 
@@ -480,8 +486,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid maximum edge length") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: Maximum edge length is invalid." @@ -513,8 +520,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid maximum mesh size") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: Maximum mesh size is invalid." @@ -546,8 +554,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid vertical mesh size") except SherlockExportFEAModelError as e: assert str(e) == "Export FEA model error: Vertical mesh size is invalid." @@ -580,8 +589,9 @@ def test_export_FEA_model(self): clear_FEA_database=True, use_FEA_model_id=True, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) - pytest.fail("No exception raised for invalid project name") + pytest.fail("No exception raised for invalid CCA name") except Exception as e: assert type(e) == SherlockExportFEAModelError @@ -613,6 +623,7 @@ def test_export_FEA_model(self): clear_FEA_database=False, use_FEA_model_id=False, coordinate_units="mm", + pcb_material_elasticity=PcbMaterialElasticity.Isotropic, ) assert result == 0 From 3811aa113b8b1db66c1ca402622408c5d1997c62 Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Mon, 17 Nov 2025 11:21:34 -0500 Subject: [PATCH 07/10] Updated lifecycle load profile methods to support CSV file imports from prior incomplete implementation. --- src/ansys/sherlock/core/lifecycle.py | 240 ++- .../sherlock/core/types/lifecycle_types.py | 338 ++++- tests/test_lifecycle.py | 1283 ++++++++++++++++- 3 files changed, 1793 insertions(+), 68 deletions(-) diff --git a/src/ansys/sherlock/core/lifecycle.py b/src/ansys/sherlock/core/lifecycle.py index 8d34a41da..c37a3ea88 100644 --- a/src/ansys/sherlock/core/lifecycle.py +++ b/src/ansys/sherlock/core/lifecycle.py @@ -42,11 +42,16 @@ from ansys.sherlock.core.types.lifecycle_types import ( DeleteEventRequest, DeletePhaseRequest, + HarmonicVibeProfileCsvFileProperties, ImportThermalSignalRequest, + RandomVibeProfileCsvFileProperties, SaveHarmonicProfileRequest, SaveRandomVibeProfileRequest, SaveShockPulseProfileRequest, SaveThermalProfileRequest, + ShockProfileDatasetCsvFileProperties, + ShockProfilePulsesCsvFileProperties, + ThermalProfileCsvFileProperties, ) from ansys.sherlock.core.utils.version_check import require_version @@ -1750,7 +1755,12 @@ def add_shock_profiles( @require_version() def load_random_vibe_profile( - self, project: str, phase_name: str, event_name: str, file_path: str + self, + project: str, + phase_name: str, + event_name: str, + file_path: str, + csv_file_properties: RandomVibeProfileCsvFileProperties = None, ) -> int: """Load random vibe profile from .csv or .dat file. @@ -1765,7 +1775,9 @@ def load_random_vibe_profile( event_name: str Name of the random vibe event. 
file_path: str - File path for thermal profile .dat or .csv file + File path for thermal profile .csv or .dat file + csv_file_properties: RandomVibeProfileCsvFileProperties + Properties of the random vibe profile CSV file, required if the file is in CSV format. Returns ------- @@ -1782,15 +1794,25 @@ def load_random_vibe_profile( True, True, True, - project="Test", + project="Test Project", cca_name="Card" ) >>> sherlock.lifecycle.load_random_vibe_profile( - project="Tutorial", + project="Test Project", phase_name="Phase 1", event_name="Random Event", - file_path="TestProfile.dat" + file_path="TestProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + frequency_column="Frequency", + frequency_units="HZ", + amplitude_column="Amplitude", + amplitude_units="G2/Hz" + ) ) """ try: @@ -1802,6 +1824,17 @@ def load_random_vibe_profile( raise SherlockLoadRandomVibeProfileError(message="Event name is invalid.") if file_path == "": raise SherlockLoadRandomVibeProfileError(message="File path is invalid.") + if file_path.lower().endswith(".csv"): + if csv_file_properties is None: + raise SherlockLoadRandomVibeProfileError( + "CSV file properties must be provided for CSV random vibe profile files." + ) + else: + if csv_file_properties is not None: + raise SherlockLoadRandomVibeProfileError( + "CSV file properties are not used for " "non-CSV random vibe profile files." + ) + if not self._is_connection_up(): raise SherlockNoGrpcConnectionException() @@ -1810,6 +1843,9 @@ def load_random_vibe_profile( phaseName=phase_name, eventName=event_name, filePath=file_path, + randomVibeCsvProps=( + csv_file_properties._convert_to_grpc() if csv_file_properties else None + ), ) response = self.stub.loadRandomVibeProfile(request) return_code = response.returnCode @@ -1824,9 +1860,14 @@ def load_random_vibe_profile( @require_version() def load_thermal_profile( - self, project: str, phase_name: str, event_name: str, file_path: str + self, + project: str, + phase_name: str, + event_name: str, + file_path: str, + csv_file_properties: ThermalProfileCsvFileProperties = None, ) -> int: - """Load a thermal profile from a .dat or .csv file. + """Load a thermal profile from a .csv or .dat file. Available Since: 2021R1 @@ -1839,7 +1880,9 @@ def load_thermal_profile( event_name: str Name of the random vibe event. file_path: str - File path for thermal profile .dat or .csv file + File path for thermal profile .csv or .dat file + csv_file_properties: ThermalProfileCsvFileProperties + Properties of the thermal profile CSV file, required if the file is in CSV format. 
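+            Must be ``None`` (the default) when loading a .dat file.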
Returns ------- @@ -1856,14 +1899,26 @@ def load_thermal_profile( True, True, True, - project="Test", + project="Test Project", cca_name="Card", ) - >>>loaded = sherlock.lifecycle.load_thermal_profile( - project="Tutorial", + >>> sherlock.lifecycle.load_thermal_profile( + project="Test Project", phase_name="Phase 1", event_name="Thermal Event", - file_path="Tutorial_Profile.dat" + file_path="Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time (min)", + time_units="min", + temp_column="Temp (C)", + temp_units="C" + ) ) """ try: @@ -1875,6 +1930,17 @@ def load_thermal_profile( raise SherlockLoadThermalProfileError(message="Event name is invalid.") if file_path == "": raise SherlockLoadThermalProfileError(message="File path is invalid.") + if file_path.lower().endswith(".csv"): + if csv_file_properties is None: + raise SherlockLoadThermalProfileError( + "CSV file properties must be provided for CSV thermal profile files." + ) + else: + if csv_file_properties is not None: + raise SherlockLoadThermalProfileError( + "CSV file properties are not used for non-CSV thermal profile files." + ) + if not self._is_connection_up(): raise SherlockNoGrpcConnectionException() @@ -1883,6 +1949,7 @@ def load_thermal_profile( phaseName=phase_name, eventName=event_name, filePath=file_path, + csvProps=csv_file_properties._convert_to_grpc() if csv_file_properties else None, ) response = self.stub.loadThermalProfile(request) return_code = response.returnCode @@ -1902,9 +1969,15 @@ def load_thermal_profile( @require_version() def load_harmonic_profile( - self, project: str, phase_name: str, event_name: str, file_path: str + self, + project: str, + phase_name: str, + event_name: str, + file_path: str, + triaxial_axis: str, + csv_file_properties: HarmonicVibeProfileCsvFileProperties = None, ) -> int: - """Load a harmonic profile from a DAT or CSV file to a life cycle phase. + """Load a harmonic profile from a .csv or .dat file to a life cycle phase. Available Since: 2021R1 @@ -1917,7 +1990,13 @@ def load_harmonic_profile( event_name: str Name of the harmonic event. file_path: str - Path for DAT or CSV file with the harmonic profile. + Path for .csv or .dat file with the harmonic profile. + triaxial_axis: str + Axis that this profile should be assigned to if the harmonic + profile type is ``"Triaxial"``. Options are: ``"x"``, ``"y"``, + and ``"z"``. + csv_file_properties: HarmonicProfileCsvFileProperties + Properties of the harmonic profile CSV file, required if the file is in CSV format. 
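+            This is the ``HarmonicVibeProfileCsvFileProperties`` model from
+            ``lifecycle_types``; it must be ``None`` (the default) when loading a .dat file.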
Returns ------- @@ -1934,15 +2013,26 @@ def load_harmonic_profile( True, True, True, - project="Test", + project="Test Project", cca_name="Card" ) - >>> loaded = sherlock.lifecycle.load_harmonic_profile( - project="Tutorial", + >>> sherlock.lifecycle.load_harmonic_profile( + project="Test Project", phase_name="Phase 1", event_name="Harmonic Event", - file_path="Test_Profile.dat" + file_path="Test_Profile.csv", + triaxial_axis="x", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + frequency_column="Frequency", + frequency_units="HZ", + load_column="Load", + load_units="G" + ) ) """ try: @@ -1954,6 +2044,17 @@ def load_harmonic_profile( raise SherlockLoadHarmonicProfileError(message="Event name is invalid.") if file_path == "": raise SherlockLoadHarmonicProfileError(message="File name is invalid.") + if file_path.lower().endswith(".csv"): + if csv_file_properties is None: + raise SherlockLoadHarmonicProfileError( + "CSV file properties must be provided for CSV harmonic profile files." + ) + else: + if csv_file_properties is not None: + raise SherlockLoadHarmonicProfileError( + "CSV file properties are not used for non-CSV harmonic profile files." + ) + if not self._is_connection_up(): raise SherlockNoGrpcConnectionException() @@ -1962,6 +2063,10 @@ def load_harmonic_profile( phaseName=phase_name, eventName=event_name, filePath=file_path, + harmonicCsvProps=( + csv_file_properties._convert_to_grpc() if csv_file_properties else None + ), + triaxialAxis=triaxial_axis, ) response = self.stub.loadHarmonicProfile(request) return_code = response.returnCode @@ -1980,7 +2085,12 @@ def load_harmonic_profile( @require_version() def load_shock_profile_dataset( - self, project: str, phase_name: str, event_name: str, file_path: str + self, + project: str, + phase_name: str, + event_name: str, + file_path: str, + csv_file_properties: ShockProfileDatasetCsvFileProperties = None, ) -> int: """Load shock profile dataset from a .csv or .dat file. @@ -1995,7 +2105,9 @@ def load_shock_profile_dataset( event_name: str Name of the random vibe event. file_path: str - File path for thermal profile .dat or .csv file + File path for thermal profile .csv or .dat file + csv_file_properties: ShockProfileDatasetCsvFileProperties + Properties of the shock profile dataset CSV file, required if the file is in CSV format. Returns ------- @@ -2012,10 +2124,26 @@ def load_shock_profile_dataset( True, True, True, - project="Test", + project="Test Project", cca_name="Card" ) + >>> sherlock.lifecycle.load_shock_profile_dataset( + project="Test Project", + phase_name="Phase 1", + event_name="Shock Event", + file_path="Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="Load", + load_units="G" + ) + ) """ try: if project == "": @@ -2026,14 +2154,28 @@ def load_shock_profile_dataset( raise SherlockLoadShockProfileDatasetError(message="Event name is invalid.") if file_path == "": raise SherlockLoadShockProfileDatasetError(message="File path is invalid.") + if file_path.lower().endswith(".csv"): + if csv_file_properties is None: + raise SherlockLoadShockProfileDatasetError( + "CSV file properties must be provided for CSV shock profile dataset files." 
+ ) + else: + if csv_file_properties is not None: + raise SherlockLoadShockProfileDatasetError( + "CSV file properties are not used for non-CSV shock profile dataset files." + ) + if not self._is_connection_up(): raise SherlockNoGrpcConnectionException() - request = SherlockLifeCycleService_pb2.LoadShockProfilePulsesRequest( + request = SherlockLifeCycleService_pb2.LoadShockProfileDatasetRequest( project=project, phaseName=phase_name, eventName=event_name, filePath=file_path, + shockDsCsvProps=( + csv_file_properties._convert_to_grpc() if csv_file_properties else None + ), ) response = self.stub.loadShockProfileDataset(request) return_code = response.returnCode @@ -2050,7 +2192,12 @@ def load_shock_profile_dataset( @require_version() def load_shock_profile_pulses( - self, project: str, phase_name: str, event_name: str, file_path: str + self, + project: str, + phase_name: str, + event_name: str, + file_path: str, + csv_file_properties: ShockProfilePulsesCsvFileProperties = None, ) -> int: """Load shock profile pulses from a .csv .dat file. @@ -2065,7 +2212,9 @@ def load_shock_profile_pulses( event_name: str Name of the random vibe event. file_path: str - Path for thermal profile .dat or .csv file + Path for thermal profile .csv or .dat file + csv_file_properties: ShockProfilePulsesCsvFileProperties + Properties of the shock profile pulses CSV file, required if the file is in CSV format. Returns ------- @@ -2089,9 +2238,24 @@ def load_shock_profile_pulses( project="Tutorial", phase_name="Phase 1", event_name="Shock Event", - file_path="Test_Profile.dat" + file_path="Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ) ) - """ try: if project == "": @@ -2102,6 +2266,17 @@ def load_shock_profile_pulses( raise SherlockLoadShockProfilePulsesError(message="Event name is invalid.") if file_path == "": raise SherlockLoadShockProfilePulsesError(message="File path is invalid.") + if file_path.lower().endswith(".csv"): + if csv_file_properties is None: + raise SherlockLoadShockProfilePulsesError( + "CSV file properties must be provided for CSV shock profile pulses files." + ) + else: + if csv_file_properties is not None: + raise SherlockLoadShockProfilePulsesError( + "CSV file properties are not used for non-CSV shock profile pulses files." + ) + if not self._is_connection_up(): raise SherlockNoGrpcConnectionException() @@ -2110,6 +2285,9 @@ def load_shock_profile_pulses( phaseName=phase_name, eventName=event_name, filePath=file_path, + shockPulsesCsvProps=( + csv_file_properties._convert_to_grpc() if csv_file_properties else None + ), ) response = self.stub.loadShockProfilePulses(request) return_code = response.returnCode @@ -2186,7 +2364,7 @@ def import_thermal_signal( def save_harmonic_profile( self, request: SaveHarmonicProfileRequest ) -> SherlockCommonService_pb2.ReturnCode: - """Save a harmonic life cycle event profile to a .dat or .csv file. + """Save a harmonic life cycle event profile to a .csv or .dat file. 
Available Since: 2026R1 @@ -2233,7 +2411,7 @@ def save_harmonic_profile( def save_random_vibe_profile( self, request: SaveRandomVibeProfileRequest ) -> SherlockCommonService_pb2.ReturnCode: - """Save a random vibe life cycle event profile to a .dat or .csv file. + """Save a random vibe life cycle event profile to a .csv or .dat file. Available Since: 2026R1 @@ -2277,7 +2455,7 @@ def save_random_vibe_profile( def save_shock_pulse_profile( self, request: SaveShockPulseProfileRequest ) -> SherlockCommonService_pb2.ReturnCode: - """Save a shock pulse life cycle event profile to a .dat or .csv file. + """Save a shock pulse life cycle event profile to a .csv or .dat file. Available Since: 2026R1 @@ -2321,7 +2499,7 @@ def save_shock_pulse_profile( def save_thermal_profile( self, request: SaveThermalProfileRequest ) -> SherlockCommonService_pb2.ReturnCode: - """Save a thermal life cycle event profile to a .dat or .csv file. + """Save a thermal life cycle event profile to a .csv or .dat file. Available Since: 2026R1 diff --git a/src/ansys/sherlock/core/types/lifecycle_types.py b/src/ansys/sherlock/core/types/lifecycle_types.py index 4734b73e4..00f4be6d0 100644 --- a/src/ansys/sherlock/core/types/lifecycle_types.py +++ b/src/ansys/sherlock/core/types/lifecycle_types.py @@ -2,9 +2,11 @@ """Module containing types for the Lifecycle Service.""" +from typing import Optional + from pydantic import BaseModel, ValidationInfo, field_validator -from ansys.sherlock.core.types.common_types import basic_str_validator +from ansys.sherlock.core.types.common_types import basic_str_validator, optional_str_validator try: import SherlockLifeCycleService_pb2 @@ -12,6 +14,340 @@ from ansys.api.sherlock.v0 import SherlockLifeCycleService_pb2 +class HarmonicVibeProfileCsvFileProperties(BaseModel): + """Properties of a harmonic vibe profile CSV file.""" + + profile_name: str + """Name of the harmonic vibe profile.""" + header_row_count: int + """Number of rows before the column header in the file.""" + column_delimiter: str = "," + """Delimiter used to separate columns in the file.""" + numeric_format: str = None + """Numeric format for the values.""" + frequency_column: str + """Name of the column containing frequency values.""" + frequency_units: str + """Units of the frequency values""" + load_column: str + """Name of the column containing load values.""" + load_units: str + """Units of the load values.""" + + def _convert_to_grpc( + self, + ) -> SherlockLifeCycleService_pb2.LoadHarmonicProfileRequest.CSVProps: + """Convert to gRPC CVSProps.""" + return SherlockLifeCycleService_pb2.LoadHarmonicProfileRequest.CSVProps( + profileName=self.profile_name, + headerRowNumber=self.header_row_count, + columnDelim=self.column_delimiter, + numericFormat=self.numeric_format, + freqColumn=self.frequency_column, + freqUnits=self.frequency_units, + loadColumn=self.load_column, + loadUnits=self.load_units, + ) + + @field_validator("header_row_count") + @classmethod + def non_negative_int_validation(cls, value: int, info: ValidationInfo): + """Validate integer fields listed contain non-negative values.""" + if value < 0: + raise ValueError(f"{info.field_name} must be greater than or equal to 0.") + return value + + @field_validator( + "profile_name", "frequency_column", "frequency_units", "load_column", "load_units" + ) + @classmethod + def str_validation(cls, value: str, info: ValidationInfo): + """Validate string fields listed.""" + return basic_str_validator(value, info.field_name) + + @field_validator("column_delimiter", 
"numeric_format") + @classmethod + def optional_str_validation(cls, value: Optional[str], info): + """Allow the test_point_ids to not be set, i.e., None.""" + return optional_str_validator(value, info.field_name) + + +class RandomVibeProfileCsvFileProperties(BaseModel): + """Properties of a random vibe profile CSV file.""" + + profile_name: str + """Name of the random vibe profile.""" + header_row_count: int + """Number of rows before the column header in the file.""" + column_delimiter: str = "," + """Delimiter used to separate columns in the file.""" + numeric_format: str = None + """Numeric format for the values.""" + frequency_column: str + """Name of the column containing frequency values.""" + frequency_units: str + """Units of the frequency values""" + amplitude_column: str + """Name of the column containing amplitude values.""" + amplitude_units: str + """Units of the amplitude values.""" + + def _convert_to_grpc( + self, + ) -> SherlockLifeCycleService_pb2.LoadRandomVibeProfileRequest.CSVProps: + """Convert to gRPC CVSProps.""" + return SherlockLifeCycleService_pb2.LoadRandomVibeProfileRequest.CSVProps( + profileName=self.profile_name, + headerRowNumber=self.header_row_count, + columnDelim=self.column_delimiter, + numericFormat=self.numeric_format, + freqColumn=self.frequency_column, + freqUnits=self.frequency_units, + amplColumn=self.amplitude_column, + amplUnits=self.amplitude_units, + ) + + @field_validator("header_row_count") + @classmethod + def non_negative_int_validation(cls, value: int, info: ValidationInfo): + """Validate integer fields listed contain non-negative values.""" + if value < 0: + raise ValueError(f"{info.field_name} must be greater than or equal to 0.") + return value + + @field_validator( + "profile_name", "frequency_column", "frequency_units", "amplitude_column", "amplitude_units" + ) + @classmethod + def str_validation(cls, value: str, info: ValidationInfo): + """Validate string fields listed.""" + return basic_str_validator(value, info.field_name) + + @field_validator("column_delimiter", "numeric_format") + @classmethod + def optional_str_validation(cls, value: Optional[str], info): + """Allow the test_point_ids to not be set, i.e., None.""" + return optional_str_validator(value, info.field_name) + + +class ShockProfileDatasetCsvFileProperties(BaseModel): + """Properties of a shock event profile using dataset CSV file.""" + + profile_name: str + """Name of the shock vibe profile.""" + header_row_count: int + """Number of rows before the column header in the file.""" + column_delimiter: str = "," + """Delimiter used to separate columns in the file.""" + numeric_format: str = None + """Numeric format for the values.""" + time_column: str + """Name of the column containing timeuency values.""" + time_units: str + """Units of the timeuency values""" + load_column: str + """Name of the column containing load values.""" + load_units: str + """Units of the load values.""" + + def _convert_to_grpc( + self, + ) -> SherlockLifeCycleService_pb2.LoadShockProfileDatasetRequest.CSVProps: + """Convert to gRPC CVSProps.""" + return SherlockLifeCycleService_pb2.LoadShockProfileDatasetRequest.CSVProps( + profileName=self.profile_name, + headerRowNumber=self.header_row_count, + columnDelim=self.column_delimiter, + numericFormat=self.numeric_format, + timeColumn=self.time_column, + timeUnits=self.time_units, + loadColumn=self.load_column, + loadUnits=self.load_units, + ) + + @field_validator("header_row_count") + @classmethod + def non_negative_int_validation(cls, value: 
+
+
+class ShockProfilePulsesCsvFileProperties(BaseModel):
+    """Properties of a shock event profile using a pulses CSV file."""
+
+    profile_name: str
+    """Name of the shock profile."""
+    header_row_count: int
+    """Number of rows before the column header in the file."""
+    column_delimiter: str = ","
+    """Delimiter used to separate columns in the file."""
+    numeric_format: Optional[str] = None
+    """Numeric format for the values."""
+    duration: float
+    """Pulse duration length."""
+    duration_units: str
+    """Time units of the pulse duration."""
+    sample_rate: float
+    """Sample rate."""
+    sample_rate_units: str
+    """Time units of the sample rate."""
+    shape_column: str
+    """Name of the column containing shape values."""
+    load_column: str
+    """Name of the column containing load values."""
+    load_units: str
+    """Units of the load values."""
+    frequency_column: str
+    """Name of the column containing frequency values."""
+    frequency_units: str
+    """Units of the frequency values."""
+    decay_column: str
+    """Name of the column containing decay values."""
+
+    def _convert_to_grpc(
+        self,
+    ) -> SherlockLifeCycleService_pb2.LoadShockProfilePulsesRequest.CSVProps:
+        """Convert to gRPC CSVProps."""
+        return SherlockLifeCycleService_pb2.LoadShockProfilePulsesRequest.CSVProps(
+            profileName=self.profile_name,
+            headerRowNumber=self.header_row_count,
+            columnDelim=self.column_delimiter,
+            numericFormat=self.numeric_format,
+            duration=self.duration,
+            durationUnits=self.duration_units,
+            sampleRate=self.sample_rate,
+            sampleRateUnits=self.sample_rate_units,
+            shapeColumn=self.shape_column,
+            loadColumn=self.load_column,
+            loadUnits=self.load_units,
+            freqColumn=self.frequency_column,
+            freqUnits=self.frequency_units,
+            decayColumn=self.decay_column,
+        )
+
+    @field_validator("header_row_count")
+    @classmethod
+    def non_negative_int_validation(cls, value: int, info: ValidationInfo):
+        """Validate integer fields listed contain non-negative values."""
+        if value < 0:
+            raise ValueError(f"{info.field_name} must be greater than or equal to 0.")
+        return value
+
+    @field_validator("duration", "sample_rate")
+    @classmethod
+    def greater_than_zero_float_validation(cls, value: float, info: ValidationInfo):
+        """Validate float fields listed contain values greater than 0."""
+        if value <= 0:
+            raise ValueError(f"{info.field_name} must be greater than 0.")
+        return value
+
+    @field_validator(
+        "profile_name",
+        "duration_units",
+        "sample_rate_units",
+        "shape_column",
+        "load_column",
+        "load_units",
+        "frequency_column",
+        "frequency_units",
+        "decay_column",
+    )
+    @classmethod
+    def str_validation(cls, value: str, info: ValidationInfo):
+        """Validate string fields listed."""
+        return basic_str_validator(value, info.field_name)
+
+    @field_validator("column_delimiter", "numeric_format")
+    @classmethod
+    def optional_str_validation(cls, value: Optional[str], info: ValidationInfo):
+        """Allow the optional string fields to not be set, i.e., None."""
+        return optional_str_validator(value, info.field_name)
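# Illustrative only: a sketch of how one of these CSV properties classes might be
# paired with its loader; the same pattern applies to the other *CsvFileProperties
# classes in this patch. The project, phase, event, file, and column values below
# are placeholders taken from the tests later in this patch, not required values:
#
#     csv_props = ShockProfilePulsesCsvFileProperties(
#         profile_name="Test Profile",
#         header_row_count=0,
#         numeric_format="English",
#         duration=25,
#         duration_units="ms",
#         sample_rate=0.1,
#         sample_rate_units="ms",
#         shape_column="Shape",
#         load_column="Load",
#         load_units="G",
#         frequency_column="Frequency",
#         frequency_units="HZ",
#         decay_column="Decay",
#     )
#     sherlock.lifecycle.load_shock_profile_pulses(
#         "Test Project",
#         "Phase 1",
#         "Shock Event",
#         "Test_Profile.csv",
#         csv_file_properties=csv_props,
#     )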
+
+
+class ThermalProfileCsvFileProperties(BaseModel):
+    """Properties of a thermal profile CSV file."""
+
+    profile_name: str
+    """Name of the thermal profile."""
+    header_row_count: int
+    """Number of rows before the column header in the file."""
+    column_delimiter: str = ","
+    """Delimiter used to separate columns in the file."""
+    numeric_format: Optional[str] = None
+    """Numeric format for the values."""
+    step_column: str
+    """Name of the column containing step values."""
+    type_column: str
+    """Name of the column containing step type values."""
+    time_column: str
+    """Name of the column containing time duration values."""
+    time_units: str
+    """Units of the time values."""
+    temperature_column: str
+    """Name of the column containing temperature values."""
+    temperature_units: str
+    """Units of the temperature values."""
+
+    def _convert_to_grpc(
+        self,
+    ) -> SherlockLifeCycleService_pb2.LoadThermalProfileRequest.CSVProps:
+        """Convert to gRPC CSVProps."""
+        return SherlockLifeCycleService_pb2.LoadThermalProfileRequest.CSVProps(
+            profileName=self.profile_name,
+            headerRowNumber=self.header_row_count,
+            columnDelim=self.column_delimiter,
+            numericFormat=self.numeric_format,
+            stepColumn=self.step_column,
+            typeColumn=self.type_column,
+            timeColumn=self.time_column,
+            timeUnits=self.time_units,
+            tempColumn=self.temperature_column,
+            tempUnits=self.temperature_units,
+        )
+
+    @field_validator("header_row_count")
+    @classmethod
+    def non_negative_int_validation(cls, value: int, info: ValidationInfo):
+        """Validate integer fields listed contain non-negative values."""
+        if value < 0:
+            raise ValueError(f"{info.field_name} must be greater than or equal to 0.")
+        return value
+
+    @field_validator(
+        "profile_name",
+        "step_column",
+        "type_column",
+        "time_column",
+        "time_units",
+        "temperature_column",
+        "temperature_units",
+    )
+    @classmethod
+    def str_validation(cls, value: str, info: ValidationInfo):
+        """Validate string fields listed."""
+        return basic_str_validator(value, info.field_name)
+
+    @field_validator("column_delimiter", "numeric_format")
+    @classmethod
+    def optional_str_validation(cls, value: Optional[str], info: ValidationInfo):
+        """Allow the optional string fields to not be set, i.e., None."""
+        return optional_str_validator(value, info.field_name)
+
+
 class ThermalSignalFileProperties(BaseModel):
     """Properties of a thermal signal file."""
diff --git a/tests/test_lifecycle.py b/tests/test_lifecycle.py
index 9e8d4cd64..f672e92b9 100644
--- a/tests/test_lifecycle.py
+++ b/tests/test_lifecycle.py
@@ -28,11 +28,16 @@
 from ansys.sherlock.core.types.lifecycle_types import (
     DeleteEventRequest,
     DeletePhaseRequest,
+    HarmonicVibeProfileCsvFileProperties,
     ImportThermalSignalRequest,
+    RandomVibeProfileCsvFileProperties,
     SaveHarmonicProfileRequest,
     SaveRandomVibeProfileRequest,
     SaveShockPulseProfileRequest,
     SaveThermalProfileRequest,
+    ShockProfileDatasetCsvFileProperties,
+    ShockProfilePulsesCsvFileProperties,
+    ThermalProfileCsvFileProperties,
     ThermalSignalFileProperties,
 )
 from ansys.sherlock.core.utils.version_check import SKIP_VERSION_CHECK
@@ -1802,7 +1807,7 @@ def helper_test_load_random_vibe_profile(lifecycle: Lifecycle):
     try:
         lifecycle.load_random_vibe_profile(
-            "Test",
+            "Test Project",
             "",
             "Random Event",
             "TestProfile.dat",
@@ -1813,7 +1818,7 @@ def helper_test_load_random_vibe_profile(lifecycle: Lifecycle):
     try:
lifecycle.load_random_vibe_profile( - "Test", + "Test Project", "Phase 1", "", "TestProfile.dat", @@ -1824,7 +1829,7 @@ def helper_test_load_random_vibe_profile(lifecycle: Lifecycle): try: lifecycle.load_random_vibe_profile( - "Test", + "Test Project", "Phase 1", "Random Event", "", @@ -1833,11 +1838,192 @@ def helper_test_load_random_vibe_profile(lifecycle: Lifecycle): except SherlockLoadRandomVibeProfileError as e: assert str(e.message) == "File path is invalid." + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + ) + pytest.fail("No exception raised when using missing CSV properties") + except SherlockLoadRandomVibeProfileError as e: + assert ( + str(e.message) + == "CSV file properties must be provided for CSV random vibe profile files." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + amplitude_column="Amplitude", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using missing profile name") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, profile_name is invalid because it is None or empty." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="", + frequency_units="Hz", + amplitude_column="Amplitude", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using missing frequency column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="", + amplitude_column="Amplitude", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using missing frequency units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + amplitude_column="", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using missing amplitude column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, amplitude_column is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + amplitude_column="Amplitude", + amplitude_units="", + ), + ) + pytest.fail("No exception raised when using missing amplitude units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, amplitude_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.csv", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=-1, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + amplitude_column="Amplitude", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using negative header row count") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, header_row_count must be greater than or equal to 0." + ) + + try: + lifecycle.load_random_vibe_profile( + "Test Project", + "Phase 1", + "Random Event", + "RandomProfile.dat", + csv_file_properties=RandomVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + amplitude_column="Amplitude", + amplitude_units="G2/Hz", + ), + ) + pytest.fail("No exception raised when using csv_file_properties for non-CSV file") + except SherlockLoadRandomVibeProfileError as e: + assert ( + str(e.message) + == "CSV file properties are not used for non-CSV random vibe profile files." + ) + if lifecycle._is_connection_up(): # happy path test missing because needs valid file try: lifecycle.load_random_vibe_profile( - "Invalid Project", + "Test Project", "Phase 1", "Random Event", "TestProfile.dat", @@ -1851,34 +2037,223 @@ def helper_test_load_harmonic_profile(lifecycle: Lifecycle): """Test load_harmonic_profile API.""" try: - lifecycle.load_harmonic_profile("", "Phase 1", "Harmonic Event", "Test_Profile.dat") + lifecycle.load_harmonic_profile("", "Phase 1", "Harmonic Event", "Test_Profile.dat", "X") pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadHarmonicProfileError as e: - assert str(e.str_itr()) == "['Load harmonic profile error: Project name is invalid.']" + assert str(e.message) == "Project name is invalid." try: - lifecycle.load_harmonic_profile("Test", "", "Harmonic Event", "Test_Profile.dat") + lifecycle.load_harmonic_profile( + "Test Project", "", "Harmonic Event", "Test_Profile.dat", "X" + ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadHarmonicProfileError as e: - assert str(e.str_itr()) == "['Load harmonic profile error: Phase name is invalid.']" + assert str(e.message) == "Phase name is invalid." try: - lifecycle.load_harmonic_profile("Test", "Phase 1", "", "Test_Profile.dat") + lifecycle.load_harmonic_profile("Test Project", "Phase 1", "", "Test_Profile.dat", "X") pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadHarmonicProfileError as e: - assert str(e.str_itr()) == "['Load harmonic profile error: Event name is invalid.']" + assert str(e.message) == "Event name is invalid." 
try: - lifecycle.load_harmonic_profile("Test", "Phase 1", "Harmonic Event", "") + lifecycle.load_harmonic_profile("Test Project", "Phase 1", "Harmonic Event", "", "X") pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadHarmonicProfileError as e: - assert str(e.str_itr()) == "['Load harmonic profile error: File name is invalid.']" + assert str(e.message) == "File name is invalid." + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + ) + pytest.fail("No exception raised when using missing CSV properties") + except SherlockLoadHarmonicProfileError as e: + assert ( + str(e.message) == "CSV file properties must be provided for CSV harmonic profile files." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing profile name") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, profile_name is invalid because it is None or empty." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="", + frequency_units="Hz", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing frequency column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing frequency units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + load_column="", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing load column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_column is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + load_column="Load", + load_units="", + ), + ) + pytest.fail("No exception raised when using missing load units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.csv", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=-1, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using negative header row count") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, header_row_count must be greater than or equal to 0." + ) + + try: + lifecycle.load_harmonic_profile( + "Test Project", + "Phase 1", + "Harmonic Event", + "Harmonic_Profile.dat", + "X", + csv_file_properties=HarmonicVibeProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + frequency_column="Frequency", + frequency_units="Hz", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using csv_file_properties for non-CSV file") + except SherlockLoadHarmonicProfileError as e: + assert ( + str(e.message) == "CSV file properties are not used for non-CSV harmonic profile files." + ) if lifecycle._is_connection_up(): # happy path test missing because needs valid file try: lifecycle.load_harmonic_profile( - "Invalid Project", + "Test Project", "Phase 1", "Harmonic Event", "Test_Profile.dat", @@ -1900,40 +2275,285 @@ def helper_test_load_thermal_profile(lifecycle: Lifecycle): ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadThermalProfileError as e: - assert str(e.str_itr()) == "['Load thermal profile error: Project name is invalid.']" + assert str(e.message) == "Project name is invalid." try: lifecycle.load_thermal_profile( - "Test", + "Test Project", "", "Thermal Event", "Tutorial_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadThermalProfileError as e: - assert str(e.str_itr()) == "['Load thermal profile error: Phase name is invalid.']" + assert str(e.message) == "Phase name is invalid." try: lifecycle.load_thermal_profile( - "Test", + "Test Project", "Phase 1", "", "Tutorial_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadThermalProfileError as e: - assert str(e.str_itr()) == "['Load thermal profile error: Event name is invalid.']" + assert str(e.message) == "Event name is invalid." try: lifecycle.load_thermal_profile( - "Test", + "Test Project", "Phase 1", "Thermal Event", "", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadThermalProfileError as e: - assert str(e.str_itr()) == "['Load thermal profile error: File path is invalid.']" + assert str(e.message) == "File path is invalid." 
+ + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + ) + pytest.fail("No exception raised when using missing CSV properties") + except SherlockLoadThermalProfileError as e: + assert ( + str(e.message) == "CSV file properties must be provided for CSV thermal profile files." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing profile name") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, profile_name is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing step column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, step_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing type column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, type_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing time column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, time_column is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing time units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, time_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using missing temperature column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, temperature_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="", + ), + ) + pytest.fail("No exception raised when using missing temperature units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, temperature_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.csv", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=-1, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using invalid header_row_count") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, header_row_count must be greater than or equal to 0." + ) + + try: + lifecycle.load_thermal_profile( + "Test Project", + "Phase 1", + "Thermal Event", + "Tutorial_Profile.dat", + csv_file_properties=ThermalProfileCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + step_column="Step", + type_column="Type", + time_column="Time", + time_units="min", + temperature_column="Temp", + temperature_units="C", + ), + ) + pytest.fail("No exception raised when using csv_file_properties for non-CSV file") + except SherlockLoadThermalProfileError as e: + assert ( + str(e.message) == "CSV file properties are not used for non-CSV thermal profile files." 
+ ) if lifecycle._is_connection_up(): # happy path test missing because needs valid file @@ -1961,46 +2581,227 @@ def helper_test_load_shock_profile_dataset(lifecycle: Lifecycle): ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfileDatasetError as e: - assert str(e.str_itr()) == "['Load shock profile dataset error: Project name is invalid.']" + assert str(e.message) == "Project name is invalid." try: lifecycle.load_shock_profile_dataset( - "Test", + "Test Project", "", "Shock Event", "Test_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfileDatasetError as e: - assert str(e.str_itr()) == "['Load shock profile dataset error: Phase name is invalid.']" + assert str(e.message) == "Phase name is invalid." try: lifecycle.load_shock_profile_dataset( - "Test", + "Test Project", "Phase 1", "", "Test_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfileDatasetError as e: - assert str(e.str_itr()) == "['Load shock profile dataset error: Event name is invalid.']" + assert str(e.message) == "Event name is invalid." try: lifecycle.load_shock_profile_dataset( - "Test", + "Test Project", "Phase 1", "Shock Event", "", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfileDatasetError as e: - assert str(e.str_itr()) == "['Load shock profile dataset error: File path is invalid.']" + assert str(e.message) == "File path is invalid." + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + ) + pytest.fail("No exception raised when using missing CSV properties") + except SherlockLoadShockProfileDatasetError as e: + assert ( + str(e.message) + == "CSV file properties must be provided for CSV shock profile dataset files." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="", + header_row_count=0, + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing profile name") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, profile_name is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + time_column="", + time_units="ms", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing time column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, time_column is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + time_column="Time", + time_units="", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing time units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, time_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="", + load_units="G", + ), + ) + pytest.fail("No exception raised when using missing load column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="Load", + load_units="", + ), + ) + pytest.fail("No exception raised when using missing load units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=-1, + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using invalid header_row_count") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, header_row_count must be greater than or equal to 0." + ) + + try: + lifecycle.load_shock_profile_dataset( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.dat", + csv_file_properties=ShockProfileDatasetCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + column_delimiter=",", + time_column="Time", + time_units="ms", + load_column="Load", + load_units="G", + ), + ) + pytest.fail("No exception raised when using csv_file_properties for non-CSV file") + except SherlockLoadShockProfileDatasetError as e: + assert ( + str(e.message) + == "CSV file properties are not used for non-CSV shock profile dataset files." 
+ ) if lifecycle._is_connection_up(): # happy path test missing because needs valid file try: lifecycle.load_shock_profile_dataset( - "Tutorial Project", + "Test Project", "Phase 1", "Shock Event", "Test_Profile.dat", @@ -2021,46 +2822,456 @@ def helper_test_load_shock_profile_pulses(lifecycle: Lifecycle): ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfilePulsesError as e: - assert str(e.str_itr()) == "['Load shock profile pulses error: Project name is invalid.']" + assert str(e.message) == "Project name is invalid." try: lifecycle.load_shock_profile_pulses( - "Test", + "Test Project", "", "Shock Event", "Test_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfilePulsesError as e: - assert str(e.str_itr()) == "['Load shock profile pulses error: Phase name is invalid.']" + assert str(e.message) == "Phase name is invalid." try: lifecycle.load_shock_profile_pulses( - "Test", + "Test Project", "Phase 1", "", "Test_Profile.dat", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfilePulsesError as e: - assert str(e.str_itr()) == "['Load shock profile pulses error: Event name is invalid.']" + assert str(e.message) == "Event name is invalid." try: lifecycle.load_shock_profile_pulses( - "Test", + "Test Project", "Phase 1", "Shock Event", "", ) pytest.fail("No exception raised when using an invalid parameter") except SherlockLoadShockProfilePulsesError as e: - assert str(e.str_itr()) == "['Load shock profile pulses error: File path is invalid.']" + assert str(e.message) == "File path is invalid." + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + ) + pytest.fail("No exception raised when using missing CSV properties") + except SherlockLoadShockProfilePulsesError as e: + assert ( + str(e.message) + == "CSV file properties must be provided for CSV shock profile pulses files." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing profile name") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, profile_name is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing shape column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, shape_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing load column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing load units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, load_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing frequency column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_column is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing frequency units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, frequency_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="", + ), + ) + pytest.fail("No exception raised when using missing decay column") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, decay_column is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing duration units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, duration_units is invalid because it is None or empty." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using missing sample rate units") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, sample_rate_units is invalid because it is None or empty." 
+ ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=-25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using negative duration") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert str(e.errors()[0]["msg"]) == "Value error, duration must be greater than 0." + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=-0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using negative sample rate") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert str(e.errors()[0]["msg"]) == "Value error, sample_rate must be greater than 0." + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.csv", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=-1, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using invalid header_row_count") + except Exception as e: + assert isinstance(e, pydantic.ValidationError) + assert ( + str(e.errors()[0]["msg"]) + == "Value error, header_row_count must be greater than or equal to 0." + ) + + try: + lifecycle.load_shock_profile_pulses( + "Test Project", + "Phase 1", + "Shock Event", + "Test_Profile.dat", + csv_file_properties=ShockProfilePulsesCsvFileProperties( + profile_name="Test Profile", + header_row_count=0, + numeric_format="English", + column_delimiter=",", + duration=25, + duration_units="ms", + sample_rate=0.1, + sample_rate_units="ms", + shape_column="Shape", + load_column="Load", + load_units="G", + frequency_column="Frequency", + frequency_units="HZ", + decay_column="Decay", + ), + ) + pytest.fail("No exception raised when using csv_file_properties for non-CSV file") + except SherlockLoadShockProfilePulsesError as e: + assert ( + str(e.message) + == "CSV file properties are not used for non-CSV shock profile pulses files." + ) if lifecycle._is_connection_up(): # happy path test missing because needs valid file try: lifecycle.load_shock_profile_pulses( - "Tutorial Project", + "Test Project", "Phase 1", "Shock Event", "Test_Profile.dat", From 06ef2fb9793e73120b3edb61192328287941c534 Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Mon, 17 Nov 2025 11:31:47 -0500 Subject: [PATCH 08/10] Updated lifecycle load profile methods to support CSV file imports from prior incomplete implementation. 
--- doc/source/api/lifecycle_types.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/source/api/lifecycle_types.rst b/doc/source/api/lifecycle_types.rst index f8861ebc4..6ec78a336 100644 --- a/doc/source/api/lifecycle_types.rst +++ b/doc/source/api/lifecycle_types.rst @@ -7,6 +7,16 @@ LifeCycle Types Constants --------- +.. autoclass:: HarmonicVibeProfileCsvFileProperties + :members: +.. autoclass:: RandomVibeProfileCsvFileProperties + :members: +.. autoclass:: ShockProfileDatasetCsvFileProperties + :members: +.. autoclass:: ShockProfilePulsesCsvFileProperties + :members: +.. autoclass:: ThermalProfileCsvFileProperties + :members: .. autoclass:: ThermalSignalFileProperties :members: .. autoclass:: ImportThermalSignalRequest From 04e228541e59ff1e4125230c286dc79c8f9e617e Mon Sep 17 00:00:00 2001 From: Paul Walters Date: Mon, 17 Nov 2025 11:41:13 -0500 Subject: [PATCH 09/10] Updated lifecycle load profile methods to support CSV file imports from prior incomplete implementation. --- src/ansys/sherlock/core/lifecycle.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ansys/sherlock/core/lifecycle.py b/src/ansys/sherlock/core/lifecycle.py index c37a3ea88..f0755e549 100644 --- a/src/ansys/sherlock/core/lifecycle.py +++ b/src/ansys/sherlock/core/lifecycle.py @@ -1832,7 +1832,7 @@ def load_random_vibe_profile( else: if csv_file_properties is not None: raise SherlockLoadRandomVibeProfileError( - "CSV file properties are not used for " "non-CSV random vibe profile files." + "CSV file properties are not used for non-CSV random vibe profile files." ) if not self._is_connection_up(): From 32d523e746f1bfb56f312ff3860c7f43d1f3b62c Mon Sep 17 00:00:00 2001 From: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com> Date: Mon, 17 Nov 2025 17:27:17 +0000 Subject: [PATCH 10/10] chore: adding changelog file 667.miscellaneous.md [dependabot-skip] --- doc/changelog.d/667.miscellaneous.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/changelog.d/667.miscellaneous.md diff --git a/doc/changelog.d/667.miscellaneous.md b/doc/changelog.d/667.miscellaneous.md new file mode 100644 index 000000000..99bbabb9d --- /dev/null +++ b/doc/changelog.d/667.miscellaneous.md @@ -0,0 +1 @@ +Fix: Unable to load CSV files into life cycle profiles
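For reference, a minimal end-to-end sketch of the CSV load path this change enables,
in the doctest style used elsewhere in the package (the project, phase, event, file
path, and column names are placeholders, and a running Sherlock instance is assumed):

>>> from ansys.sherlock.core.launcher import launch_sherlock
>>> from ansys.sherlock.core.types.lifecycle_types import ThermalProfileCsvFileProperties
>>> sherlock = launch_sherlock()
>>> sherlock.lifecycle.load_thermal_profile(
>>>     "Test Project",
>>>     "Phase 1",
>>>     "Thermal Event",
>>>     "Tutorial_Profile.csv",
>>>     csv_file_properties=ThermalProfileCsvFileProperties(
>>>         profile_name="Test Profile",
>>>         header_row_count=0,
>>>         column_delimiter=",",
>>>         step_column="Step",
>>>         type_column="Type",
>>>         time_column="Time",
>>>         time_units="min",
>>>         temperature_column="Temp",
>>>         temperature_units="C",
>>>     ),
>>> )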