From 267a96f6f2e9786fa093151bab1e86f65d5ec42d Mon Sep 17 00:00:00 2001
From: Kristian Zarębski
Date: Wed, 8 Oct 2025 15:15:40 +0100
Subject: [PATCH 1/6] 🚨 Notify on deprecation of string on timestamps
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 simvue/api/objects/events.py |  6 +--
 simvue/api/objects/folder.py |  6 ++-
 simvue/api/objects/run.py    | 18 +++++--
 simvue/api/objects/tag.py    |  6 ++-
 simvue/metadata.py           |  2 +-
 simvue/models.py             | 96 ++++++++++++++++++++--------------
 simvue/run.py                | 99 ++++++++++++++++++------------------
 simvue/utilities.py          | 34 -------------
 8 files changed, 134 insertions(+), 133 deletions(-)

diff --git a/simvue/api/objects/events.py b/simvue/api/objects/events.py
index 2bd50ebd..81a0b8d1 100644
--- a/simvue/api/objects/events.py
+++ b/simvue/api/objects/events.py
@@ -16,7 +16,7 @@
 from simvue.api.url import URL
 from .base import SimvueObject
-from simvue.models import DATETIME_FORMAT, EventSet
+from simvue.models import EventSet, simvue_timestamp
 from simvue.api.request import get as sv_get, get_json_from_response

 try:
@@ -98,8 +98,8 @@ def histogram(
                 "value difference must be greater than window"
             )
         _url: URL = self._base_url / "histogram"
-        _time_begin: str = timestamp_begin.strftime(DATETIME_FORMAT)
-        _time_end: str = timestamp_end.strftime(DATETIME_FORMAT)
+        _time_begin: str = simvue_timestamp(timestamp_begin)
+        _time_end: str = simvue_timestamp(timestamp_end)
         _response = sv_get(
             url=_url,
             headers=self._headers,
diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py
index ca917ee6..845368e3 100644
--- a/simvue/api/objects/folder.py
+++ b/simvue/api/objects/folder.py
@@ -226,7 +226,11 @@ def created(self) -> datetime.datetime | None:
         """Retrieve created datetime for the run"""
         _created: str | None = self._get_attribute("created")
         return (
-            datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None
+            datetime.datetime.strptime(_created, DATETIME_FORMAT).replace(
+                tzinfo=datetime.timezone.utc
+            )
+            if _created
+            else None
         )
diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py
index bda02c93..31377f94 100644
--- a/simvue/api/objects/run.py
+++ b/simvue/api/objects/run.py
@@ -35,7 +35,7 @@
     get_json_from_response,
 )
 from simvue.api.url import URL
-from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT
+from simvue.models import FOLDER_REGEX, NAME_REGEX, DATETIME_FORMAT, simvue_timestamp

 Status = typing.Literal[
     "lost", "failed", "completed", "terminated", "running", "created"
@@ -478,14 +478,18 @@ def started(self) -> datetime.datetime | None:
         """
         _started: str | None = self._get_attribute("started")
         return (
-            datetime.datetime.strptime(_started, DATETIME_FORMAT) if _started else None
+            datetime.datetime.strptime(_started, DATETIME_FORMAT).replace(
+                tzinfo=datetime.timezone.utc
+            )
+            if _started
+            else None
         )

     @started.setter
     @write_only
     @pydantic.validate_call
     def started(self, started: datetime.datetime) -> None:
-        self._staging["started"] = started.strftime(DATETIME_FORMAT)
+        self._staging["started"] = simvue_timestamp(started)

     @property
     @staging_check
@@ -498,14 +502,18 @@ def endtime(self) -> datetime.datetime | None:
         """
         _endtime: str | None = self._get_attribute("endtime")
         return (
-            datetime.datetime.strptime(_endtime, DATETIME_FORMAT) if _endtime else None
+            datetime.datetime.strptime(_endtime, DATETIME_FORMAT).replace(
+                tzinfo=datetime.timezone.utc
+            )
+            if _endtime
+            else None
         )

     @endtime.setter
     @write_only
     @pydantic.validate_call
     def endtime(self, endtime: datetime.datetime) -> None:
-        self._staging["endtime"] = endtime.strftime(DATETIME_FORMAT)
+        self._staging["endtime"] = simvue_timestamp(endtime)

     @property
     def metrics(
diff --git a/simvue/api/objects/tag.py b/simvue/api/objects/tag.py
index 464dc874..38191478 100644
--- a/simvue/api/objects/tag.py
+++ b/simvue/api/objects/tag.py
@@ -100,7 +100,11 @@ def created(self) -> datetime.datetime | None:
         """Retrieve created datetime for the run"""
         _created: str | None = self._get_attribute("created")
         return (
-            datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None
+            datetime.datetime.strptime(_created, DATETIME_FORMAT).replace(
+                tzinfo=datetime.timezone.utc
+            )
+            if _created
+            else None
         )

     @classmethod
diff --git a/simvue/metadata.py b/simvue/metadata.py
index 614de6e0..502f751f 100644
--- a/simvue/metadata.py
+++ b/simvue/metadata.py
@@ -16,7 +16,7 @@
 import logging
 import pathlib

-from simvue.utilities import simvue_timestamp
+from simvue.models import simvue_timestamp

 logger = logging.getLogger(__file__)
diff --git a/simvue/models.py b/simvue/models.py
index 62e988f1..c4de5411 100644
--- a/simvue/models.py
+++ b/simvue/models.py
@@ -1,6 +1,7 @@
 import datetime
 import typing
 import numpy
+import warnings

 import pydantic

@@ -18,6 +19,54 @@
 ]


+def validate_timestamp(timestamp: str | None, raise_except: bool = True) -> str | bool | None:
+    """
+    Validate a user-provided timestamp, returning it unchanged if valid
+    """
+    # Pass None straight through so this function is safe to use as a
+    # pydantic before-validator on an optional field
+    if timestamp is None:
+        return None
+    try:
+        _ = datetime.datetime.strptime(timestamp, DATETIME_FORMAT)
+    except ValueError as e:
+        if raise_except:
+            raise e
+        return False
+
+    return timestamp
+
+
+@pydantic.validate_call(config={"validate_default": True})
+def simvue_timestamp(
+    date_time: datetime.datetime
+    | typing.Annotated[str | None, pydantic.BeforeValidator(validate_timestamp)]
+    | None = None,
+) -> str:
+    """Return the Simvue valid timestamp
+
+    Parameters
+    ----------
+    date_time: datetime.datetime, optional
+        if provided, the datetime object to convert, else use current date and time
+
+    Returns
+    -------
+    str
+        Datetime string valid for the Simvue server
+    """
+    if isinstance(date_time, str):
+        warnings.warn(
+            "Timestamps as strings for object recording will be deprecated in Python API >= 2.3"
+        )
+    if not date_time:
+        date_time = datetime.datetime.now(datetime.timezone.utc)
+    elif isinstance(date_time, str):
+        # astimezone() on the naive now() yields the system timezone
+        _local_time = datetime.datetime.now().astimezone().tzinfo
+        date_time = (
+            datetime.datetime.strptime(date_time, DATETIME_FORMAT)
+            .replace(tzinfo=_local_time)
+            .astimezone(datetime.timezone.utc)
+        )
+    return date_time.strftime(DATETIME_FORMAT)
+
+
 # Pydantic class to validate run.init()
 class RunInput(pydantic.BaseModel):
     model_config = pydantic.ConfigDict(extra="forbid")
@@ -33,44 +82,24 @@ class RunInput(pydantic.BaseModel):
 class MetricSet(pydantic.BaseModel):
     model_config = pydantic.ConfigDict(extra="forbid")
     time: pydantic.NonNegativeFloat | pydantic.NonNegativeInt
-    timestamp: str
+    timestamp: typing.Annotated[str | None, pydantic.BeforeValidator(simvue_timestamp)]
     step: pydantic.NonNegativeInt
     values: dict[str, int | float | bool]

-    @pydantic.field_validator("timestamp", mode="after")
-    @classmethod
-    def timestamp_str(cls, value: str) -> str:
-        try:
-            _ = datetime.datetime.strptime(value, DATETIME_FORMAT)
-        except ValueError as e:
-            raise AssertionError(
-                f"Invalid timestamp, expected form '{DATETIME_FORMAT}'"
-            ) from e
-        return value
-

 class GridMetricSet(pydantic.BaseModel):
extra="forbid") + model_config = pydantic.ConfigDict( + arbitrary_types_allowed=True, extra="forbid", validate_default=True + ) time: pydantic.NonNegativeFloat | pydantic.NonNegativeInt - timestamp: str + timestamp: typing.Annotated[str | None, pydantic.BeforeValidator(simvue_timestamp)] step: pydantic.NonNegativeInt - array: list | numpy.ndarray + array: list[float] | numpy.ndarray grid: str metric: str - @pydantic.field_validator("timestamp", mode="after") - @classmethod - def timestamp_str(cls, value: str) -> str: - try: - datetime.datetime.strptime(value, DATETIME_FORMAT) - except ValueError as e: - raise AssertionError( - f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" - ) from e - return value - @pydantic.field_serializer("array", when_used="always") - def serialize_array(self, value: numpy.ndarray | list, *_) -> list: + def serialize_array(self, value: numpy.ndarray | list[float], *_) -> list[float]: if isinstance(value, list): return value return value.tolist() @@ -79,15 +108,4 @@ def serialize_array(self, value: numpy.ndarray | list, *_) -> list: class EventSet(pydantic.BaseModel): model_config = pydantic.ConfigDict(extra="forbid") message: str - timestamp: str - - @pydantic.field_validator("timestamp", mode="after") - @classmethod - def timestamp_str(cls, value: str) -> str: - try: - datetime.datetime.strptime(value, DATETIME_FORMAT) - except ValueError as e: - raise AssertionError( - f"Invalid timestamp, expected form '{DATETIME_FORMAT}'" - ) from e - return value + timestamp: typing.Annotated[str | None, pydantic.BeforeValidator(simvue_timestamp)] diff --git a/simvue/run.py b/simvue/run.py index c0904d78..411bc25c 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -13,6 +13,7 @@ import multiprocessing.synchronize import threading import humanfriendly +import datetime import os import multiprocessing import pydantic @@ -41,14 +42,18 @@ from .factory.dispatch import Dispatcher from .executor import Executor, get_current_shell from .metrics import SystemResourceMeasurement -from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString +from .models import ( + FOLDER_REGEX, + NAME_REGEX, + MetricKeyString, + validate_timestamp, + simvue_timestamp, +) from .system import get_system from .metadata import git_info, environment from .eco import CO2Monitor from .utilities import ( skip_if_failed, - validate_timestamp, - simvue_timestamp, ) from .api.objects import ( Run as RunObject, @@ -281,11 +286,6 @@ def duration(self) -> float: """Return current run duration""" return time.time() - self._start_time - @property - def time_stamp(self) -> str: - """Return current timestamp""" - return simvue_timestamp() - @property def processes(self) -> list[psutil.Process]: """Create an array containing a list of processes""" @@ -1256,15 +1256,21 @@ def update_tags(self, tags: list[str]) -> bool: @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised - @pydantic.validate_call - def log_event(self, message: str, timestamp: str | None = None) -> bool: + @pydantic.validate_call(config={"validate_default": True}) + def log_event( + self, + message: str, + timestamp: typing.Annotated[ + datetime.datetime | str | None, pydantic.BeforeValidator(simvue_timestamp) + ] = None, + ) -> bool: """Log event to the server Parameters ---------- message : str event message to log - timestamp : str, optional + timestamp : datetime, optional manually specify the time stamp for this log, by default None Returns @@ -1287,11 +1293,7 @@ def log_event(self, message: str, timestamp: str | None = 
@@ -1287,11 +1293,7 @@ def log_event(self, message: str, timestamp: str | None = None) -> bool:
             self._error("Cannot log events when not in the running state")
             return False

-        if timestamp and not validate_timestamp(timestamp):
-            self._error("Invalid timestamp format")
-            return False
-
-        _data = {"message": message, "timestamp": timestamp or self.time_stamp}
+        _data = {"message": message, "timestamp": timestamp}
         self._dispatcher.add_item(_data, "events", self._queue_blocking)

         return True
@@ -1302,7 +1304,7 @@ def _add_metrics_to_dispatch(
         *,
         step: int | None = None,
         time: float | None = None,
-        timestamp: str | None = None,
+        timestamp: datetime.datetime | str | None = None,
         join_on_fail: bool = True,
     ) -> bool:
         if self._user_config.run.mode == "disabled":
@@ -1326,14 +1328,14 @@ def _add_metrics_to_dispatch(
             )
             return False

-        if timestamp and not validate_timestamp(timestamp):
+        if isinstance(timestamp, str) and not validate_timestamp(timestamp, raise_except=False):
             self._error("Invalid timestamp format", join_on_fail)
             return False

         _data: dict[str, typing.Any] = {
             "values": metrics,
             "time": time if time is not None else self.duration,
-            "timestamp": timestamp if timestamp is not None else self.time_stamp,
+            "timestamp": simvue_timestamp(timestamp),
             "step": step if step is not None else self._step,
         }
         self._dispatcher.add_item(_data, "metrics_regular", self._queue_blocking)
@@ -1346,53 +1348,48 @@ def _add_tensors_to_dispatch(
         *,
         step: int | None = None,
         time: float | None = None,
-        timestamp: str | None = None,
+        timestamp: datetime.datetime | str | None = None,
         join_on_fail: bool = True,
     ) -> bool:
-        for tensor, array in tensors.items():
-            _data: dict[str, typing.Any] = {
-                "array": array,
-                "time": time if time is not None else self.duration,
-                "timestamp": timestamp if timestamp is not None else self.time_stamp,
-                "step": step if step is not None else self._step,
-                "grid": self._grids[tensor]["id"],
-                "metric": tensor,
-            }
-
-            self._dispatcher.add_item(_data, "metrics_tensor", self._queue_blocking)
-
-        return True
-
-    def _add_values_to_dispatch(
-        self,
-        values: dict[str, int | float | numpy.ndarray],
-        *,
-        step: int | None = None,
-        time: float | None = None,
-        timestamp: str | None = None,
-        join_on_fail: bool = True,
-    ) -> bool:
-        # If there are no metrics to log just ignore
         if self._user_config.run.mode == "disabled":
             return True

-        if not values:
+        # If there are no metrics to log just ignore
+        if not tensors:
             return True

+        if not self._sv_obj or not self._dispatcher:
+            self._error("Cannot log tensors, run not initialised", join_on_fail)
+            return False
+
         if not self._active:
             self._error("Run is not active", join_on_fail)
             return False

         if self._status != "running":
             self._error(
-                "Cannot log metrics when not in the running state", join_on_fail
+                "Cannot log tensors when not in the running state", join_on_fail
             )
             return False

-        if timestamp and not validate_timestamp(timestamp):
+        if isinstance(timestamp, str) and not validate_timestamp(timestamp, raise_except=False):
             self._error("Invalid timestamp format", join_on_fail)
             return False

+        for tensor, array in tensors.items():
+            _data: dict[str, typing.Any] = {
+                "array": array,
+                "time": time if time is not None else self.duration,
+                "timestamp": simvue_timestamp(timestamp),
+                "step": step if step is not None else self._step,
+                "grid": self._grids[tensor]["id"],
+                "metric": tensor,
+            }
+
+            self._dispatcher.add_item(_data, "metrics_tensor", self._queue_blocking)
+
+        return True
+
     @skip_if_failed("_aborted", "_suppress_errors", False)
     @check_run_initialised
     @pydantic.validate_call(config={"arbitrary_types_allowed": True})
     def assign_metric_to_grid(
@@ -1475,13 +1472,17 @@ def assign_metric_to_grid(
     @skip_if_failed("_aborted", "_suppress_errors", False)
     @check_run_initialised
-    @pydantic.validate_call(config={"arbitrary_types_allowed": True})
+    @pydantic.validate_call(
+        config={"arbitrary_types_allowed": True, "validate_default": True}
+    )
     def log_metrics(
         self,
         metrics: dict[MetricKeyString, int | float | numpy.ndarray],
         step: int | None = None,
         time: float | None = None,
-        timestamp: str | None = None,
+        timestamp: typing.Annotated[
+            datetime.datetime | str | None, pydantic.BeforeValidator(simvue_timestamp)
+        ] = None,
     ) -> bool:
         """Log metrics to Simvue server.
diff --git a/simvue/utilities.py b/simvue/utilities.py
index 4f905eac..975536ea 100644
--- a/simvue/utilities.py
+++ b/simvue/utilities.py
@@ -1,4 +1,3 @@
-import datetime
 import hashlib
 import logging
 import json
@@ -14,9 +13,6 @@
 import jwt
 from deepmerge import Merger

-from datetime import timezone
-from simvue.models import DATETIME_FORMAT
-
 CHECKSUM_BLOCK_SIZE = 4096

 EXTRAS: tuple[str, ...] = ("plot", "torch")
@@ -372,36 +368,6 @@ def calculate_sha256(filename: str | typing.Any, is_file: bool) -> str | None:
     return sha256_hash.hexdigest()


-def validate_timestamp(timestamp):
-    """
-    Validate a user-provided timestamp
-    """
-    try:
-        datetime.datetime.strptime(timestamp, DATETIME_FORMAT)
-    except ValueError:
-        return False
-
-    return True
-
-
-def simvue_timestamp(date_time: datetime.datetime | None = None) -> str:
-    """Return the Simvue valid timestamp
-
-    Parameters
-    ----------
-    date_time: datetime.datetime, optional
-        if provided, the datetime object to convert, else use current date and time
-
-    Returns
-    -------
-    str
-        Datetime string valid for the Simvue server
-    """
-    if not date_time:
-        date_time = datetime.datetime.now(timezone.utc)
-    return date_time.strftime(DATETIME_FORMAT)
-
-
 @functools.lru_cache
 def get_mimetypes() -> list[str]:
     """Returns a list of allowed MIME types"""
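Usage sketch (editorial addition, not part of the patch series): the behaviour the relocated simvue_timestamp() helper is intended to have after PATCH 1/6. The format string quoted in the comments is an assumption based on the DATETIME_FORMAT constant imported throughout simvue.models; its exact value is not shown in this patch.

import datetime

from simvue.models import simvue_timestamp

# No argument: the current UTC time, formatted for the server
print(simvue_timestamp())

# A datetime object is formatted directly (here an aware UTC instant),
# e.g. "2025-10-08 14:15:40.000000" if DATETIME_FORMAT is "%Y-%m-%d %H:%M:%S.%f"
print(simvue_timestamp(datetime.datetime(2025, 10, 8, 14, 15, 40, tzinfo=datetime.timezone.utc)))

# A string is still accepted but now emits the deprecation warning; it is
# treated as local time and converted to UTC before formatting
print(simvue_timestamp("2025-10-08 15:15:40.000000"))
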
From 48bef67c66eb83ef7db0e4e8dafbde0714701590 Mon Sep 17 00:00:00 2001
From: Kristian Zarębski
Date: Fri, 10 Oct 2025 11:58:41 +0100
Subject: [PATCH 2/6] 📝 Add additional information regarding datetime to
 docstrings
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 simvue/models.py | 6 ++++--
 simvue/run.py    | 8 +++++---
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/simvue/models.py b/simvue/models.py
index c4de5411..c65a75c0 100644
--- a/simvue/models.py
+++ b/simvue/models.py
@@ -43,8 +43,10 @@ def simvue_timestamp(

     Parameters
     ----------
-    date_time: datetime.datetime, optional
-        if provided, the datetime object to convert, else use current date and time
+    date_time: datetime.datetime | str, optional
+        if provided, the datetime object to convert,
+        else use current date and time
+        if a string is provided, it is assumed to be local time.

     Returns
     -------
diff --git a/simvue/run.py b/simvue/run.py
index 411bc25c..3664fa3c 100644
--- a/simvue/run.py
+++ b/simvue/run.py
@@ -1270,8 +1270,9 @@ def log_event(
         ----------
         message : str
             event message to log
-        timestamp : datetime, optional
+        timestamp : datetime.datetime | str, optional
             manually specify the time stamp for this log, by default None
+            if a string is provided, it is assumed to be local time

         Returns
         -------
@@ -1494,8 +1495,9 @@ def log_metrics(
             manually specify the step index for this log, by default None
         time : int, optional
             manually specify the time for this log, by default None
-        timestamp : str, optional
-            manually specify the timestamp for this log, by default None
+        timestamp : datetime.datetime | str, optional
+            manually specify the time stamp for this log, by default None
+            if a string is provided, it is assumed to be local time

         Returns
         -------
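Usage sketch (editorial addition, not part of the patch series): how the widened timestamp parameters documented above are expected to be called from the Run class. The run name and folder are placeholders.

import datetime

import simvue

with simvue.Run() as run:
    run.init(name="timestamp-demo", folder="/sandbox")
    event_time = datetime.datetime.now(datetime.timezone.utc)
    # A datetime is now accepted directly for both events and metrics
    run.log_event("checkpoint written", timestamp=event_time)
    run.log_metrics({"loss": 0.42}, step=0, timestamp=event_time)
    # Omitting the timestamp falls back to the current UTC time
    run.log_metrics({"loss": 0.40}, step=1)
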
From 6c66490f2df82d06256927b2ef12c8ac800f5fe9 Mon Sep 17 00:00:00 2001
From: Matt Field
Date: Mon, 13 Oct 2025 10:52:23 +0100
Subject: [PATCH 3/6] Add optional throw exceptions

---
 simvue/sender.py          | 24 +++++++++++++++++++++---
 tests/unit/test_sender.py |  8 +++++++-
 2 files changed, 28 insertions(+), 4 deletions(-)

diff --git a/simvue/sender.py b/simvue/sender.py
index ee8f2913..30e869d9 100644
--- a/simvue/sender.py
+++ b/simvue/sender.py
@@ -56,6 +56,7 @@ def upload_cached_file(
     obj_type: str,
     file_path: pydantic.FilePath,
     id_mapping: dict[str, str],
+    throw_exceptions: bool,
     retry_failed_uploads: bool,
     lock: threading.Lock,
 ) -> None:
@@ -71,6 +72,10 @@ def upload_cached_file(
         The path to the cached file to upload
     id_mapping : dict[str, str]
         A mapping of offline to online object IDs
+    throw_exceptions : bool
+        Whether to throw exceptions, or just log them
+    retry_failed_uploads : bool
+        Whether to retry failed uploads or ignore them
     lock : threading.Lock
         A lock to prevent multiple threads accessing the id mapping directory at once
     """
@@ -83,7 +88,10 @@ def upload_cached_file(

     try:
         _instance_class = getattr(simvue.api.objects, _exact_type)
-    except AttributeError:
+    except AttributeError as error:
+        if throw_exceptions:
+            raise error
+
         _logger.error(f"Attempt to initialise unknown type '{_exact_type}'")
         _log_upload_failed(file_path)
         return
@@ -109,11 +117,13 @@ def upload_cached_file(

             _new_id = obj_for_upload.id
     except Exception as error:
-        if "status 409" in error.args[0]:
+        if "status 409" in str(error):
             return
+        if throw_exceptions:
+            raise error

         _logger.error(
-            f"Error while committing '{_instance_class.__name__}': {error.args[0]}"
+            f"Error while committing '{_instance_class.__name__}': {str(error)}"
         )
         _log_upload_failed(file_path)
         return
@@ -182,6 +192,7 @@ def sender(
     max_workers: int = 5,
     threading_threshold: int = 10,
     objects_to_upload: list[str] = UPLOAD_ORDER,
+    throw_exceptions: bool = False,
     retry_failed_uploads: bool = False,
 ) -> dict[str, str]:
     """Send data from a local cache directory to the Simvue server.
@@ -196,6 +207,8 @@ def sender(
         The number of cached files above which threading will be used
     objects_to_upload : list[str]
         Types of objects to upload, by default uploads all types of objects present in cache
+    throw_exceptions : bool, optional
+        Whether to throw exceptions as they are encountered in the sender, default is False (exceptions will be logged)
     retry_failed_uploads : bool, optional
         Whether to retry sending objects which previously failed, by default False
@@ -238,6 +251,7 @@ def sender(
                 obj_type=_obj_type,
                 file_path=file_path,
                 id_mapping=_id_mapping,
+                throw_exceptions=throw_exceptions,
                 retry_failed_uploads=retry_failed_uploads,
                 lock=_lock,
             )
@@ -251,11 +265,15 @@ def sender(
                     obj_type=_obj_type,
                     file_path=file_path,
                     id_mapping=_id_mapping,
+                    throw_exceptions=throw_exceptions,
                     retry_failed_uploads=retry_failed_uploads,
                     lock=_lock,
                 ),
                 _offline_files,
             )
+            # This will raise any exceptions encountered during sending
+            for result in _results:
+                pass

     # Send heartbeats
     _headers: dict[str, str] = {
diff --git a/tests/unit/test_sender.py b/tests/unit/test_sender.py
index 0c8555a0..5d0933e6 100644
--- a/tests/unit/test_sender.py
+++ b/tests/unit/test_sender.py
@@ -13,11 +13,12 @@
 import pathlib
 import requests


+@pytest.mark.parametrize("throw_exceptions", (True, False))
 @pytest.mark.parametrize("retry_failed_uploads", (True, False))
 @pytest.mark.parametrize("parallel", (True, False))
 @pytest.mark.offline
-def test_sender_exception_handling(offline_cache_setup, caplog, retry_failed_uploads, parallel):
+def test_sender_exception_handling(offline_cache_setup, caplog, throw_exceptions, retry_failed_uploads, parallel):
     # Create something which will produce an error when sent, eg a metric with invalid run ID
     for i in range(5):
         _metrics = Metrics.new(
@@ -33,6 +34,11 @@
             offline=True
         )
         _metrics.commit()
+
+    if throw_exceptions:
+        with pytest.raises(ValueError):
+            sender(throw_exceptions=True, threading_threshold=1 if parallel else 10)
+        return

     with caplog.at_level(logging.ERROR):
         sender(threading_threshold=1 if parallel else 10)
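Usage sketch (editorial addition, not part of the patch series): the opt-in failure mode added in PATCH 3/6. By default the sender logs upload errors and carries on; with throw_exceptions=True the first exception raised while uploading a cached file propagates to the caller. In the threaded path this relies on the fact that iterating the results of ThreadPoolExecutor.map() re-raises any exception raised inside a worker, which is what the added "for result in _results" loop does.

from simvue.sender import sender

try:
    id_mapping = sender(throw_exceptions=True)
except Exception as error:
    # With the default throw_exceptions=False this would only be logged
    print(f"Offline upload failed: {error}")
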
From 074d2f06abfaea2ba09843a5bd6bf7181d1aad45 Mon Sep 17 00:00:00 2001
From: Matt Field
Date: Mon, 13 Oct 2025 10:55:49 +0100
Subject: [PATCH 4/6] Sender in tests should throw errors

---
 tests/functional/test_run_class.py           | 24 ++++++++++----------
 tests/functional/test_run_execute_process.py |  2 +-
 tests/unit/test_event_alert.py               |  6 ++---
 tests/unit/test_events.py                    |  2 +-
 tests/unit/test_file_artifact.py             |  6 ++---
 tests/unit/test_file_storage.py              |  2 +-
 tests/unit/test_folder.py                    |  6 ++---
 tests/unit/test_grids.py                     |  4 ++--
 tests/unit/test_metric_range_alert.py        |  6 ++---
 tests/unit/test_metric_threshold_alert.py    |  6 ++---
 tests/unit/test_metrics.py                   |  2 +-
 tests/unit/test_object_artifact.py           |  2 +-
 tests/unit/test_run.py                       |  6 ++---
 tests/unit/test_s3_storage.py                |  2 +-
 tests/unit/test_tag.py                       |  6 ++---
 tests/unit/test_tenant.py                    |  2 +-
 tests/unit/test_user.py                      |  2 +-
 tests/unit/test_user_alert.py                | 10 ++++----
 18 files changed, 48 insertions(+), 48 deletions(-)

diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py
index 4a0d54dc..2654ff0a 100644
--- a/tests/functional/test_run_class.py
+++ b/tests/functional/test_run_class.py
@@ -116,7 +116,7 @@ def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_pl
     run_created.config(enable_emission_metrics=True)
     time.sleep(5)
     # Run should continue, but fail to log metrics until sender runs and creates file
-    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"])
+    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True)
     _run = RunObject(identifier=id_mapping[run_created.id])
     _metric_names = [item[0] for item in _run.metrics]
     for _metric in ["emissions", "energy_consumed"]:
@@ -126,7 +126,7 @@ def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_pl
         assert _delta_metric_name not in _metric_names
     # Sender should now have made a local file, and the run should be able to use it to create emissions metrics
     time.sleep(5)
-    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"])
+    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], throw_exceptions=True)
     _run.refresh()
     _metric_names = [item[0] for item in _run.metrics]
     client = sv_cl.Client()
@@ -318,7 +318,7 @@ def test_log_metrics_offline(
     run.log_metrics(METRICS)
     time.sleep(1)

-    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)
     time.sleep(1)

     if metric_type == "tensor":
@@ -441,7 +441,7 @@ def test_visibility_offline(
         retention_period=os.environ.get("SIMVUE_TESTING_RETENTION_PERIOD", "2 mins"),
     )
     _id = run.id
-    _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)
     run.close()

     _retrieved_run = RunObject(identifier=_id_mapping.get(_id))
@@ -478,7 +478,7 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -
     run, _ = create_plain_run_offline
     run_name = run.name
     run.log_event(EVENT_MSG)
-    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)

     client = sv_cl.Client()
     attempts: int = 0
@@ -488,7 +488,7 @@ def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -
         not (event_data := client.get_events(client.get_run_id_from_name(run_name), count_limit=1))
     ) and attempts < 5:
         time.sleep(1)
-        sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+        sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)
         attempts += 1

     assert event_data[0].get("message", EVENT_MSG)
@@ -497,7 +497,7 @@
 @pytest.mark.offline
 def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None:
     run, run_data = create_plain_run_offline
-    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)

     client = sv_cl.Client()
     tags = client.get_tags()
@@ -557,7 +557,7 @@ def test_update_metadata_offline(
     # Try updating an already defined piece of metadata
     run.update_metadata({"a": 1})

-    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)

     client = sv_cl.Client()
     run_info = client.get_run(client.get_run_id_from_name(run_name))
@@ -945,7 +945,7 @@ def test_save_file_offline(
         "w",
     ) as out_f:
         out_f.write("updated file!")
-    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)
     os.remove(out_name)
     client = sv_cl.Client()
     base_name = name or out_name.name
@@ -1031,7 +1031,7 @@
     simvue_run.update_tags(["additional"])

-    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+    sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)

     client = sv_cl.Client()
     run_data = client.get_run(client.get_run_id_from_name(run_name))
@@ -1358,7 +1358,7 @@ def test_reconnect_functionality(mode, monkeypatch: pytest.MonkeyPatch) -> None:
     )
     run_id = run.id
     if mode == "offline":
-        _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+        _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)
         run_id = _id_mapping.get(run_id)

     client = simvue.Client()
@@ -1372,7 +1372,7 @@ def test_reconnect_functionality(mode, monkeypatch: pytest.MonkeyPatch) -> None:
         run.log_event("Testing!")

     if mode == "offline":
-        _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10)
+        _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10, throw_exceptions=True)

     _reconnected_run = client.get_run(run_id)
     assert dict(_reconnected_run.metrics)["test_metric"]["last"] == 1
diff --git a/tests/functional/test_run_execute_process.py b/tests/functional/test_run_execute_process.py
index a3dfb3bc..1337e301 100644
--- a/tests/functional/test_run_execute_process.py
+++ b/tests/functional/test_run_execute_process.py
@@ -24,7 +24,7 @@ def test_monitor_processes(create_plain_run_offline: tuple[Run, dict]):
     _run.add_process(f"process_1_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="Write-Output 'Hello World!'", executable="powershell")
     _run.add_process(f"process_2_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="Get-ChildItem", executable="powershell")
     _run.add_process(f"process_3_{os.environ.get('PYTEST_XDIST_WORKER', 0)}", Command="exit 0", executable="powershell")
-    sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"])
+    sender(_run._sv_obj._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"], throw_exceptions=True)


 @pytest.mark.executor
diff --git a/tests/unit/test_event_alert.py b/tests/unit/test_event_alert.py
index 254a44d7..2e4d7722 100644
--- a/tests/unit/test_event_alert.py
+++ b/tests/unit/test_event_alert.py
@@ -56,7 +56,7 @@ def test_event_alert_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == f"events_alert_{_uuid}"
     assert _local_data.get("notification") == "none"

-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -106,7 +106,7 @@ def test_event_alert_modification_offline(offline_cache_setup) -> None:
         description=None
     )
     _alert.commit()
-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -130,7 +130,7 @@ def test_event_alert_modification_offline(offline_cache_setup) -> None:
         _local_data = json.load(in_f)
     assert _local_data.get("description") == "updated!"
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     _online_alert.refresh()
diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py
index 41c2d2c1..1839f067 100644
--- a/tests/unit/test_events.py
+++ b/tests/unit/test_events.py
@@ -56,7 +56,7 @@ def test_events_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("events")[0].get("message") == "This is a test!"
     assert _local_data.get("events")[0].get("timestamp") == _timestamp

-    _id_mapping = sender(_events._local_staging_file.parents[1], 1, 10, ["folders", "runs", "events"])
+    _id_mapping = sender(_events._local_staging_file.parents[1], 1, 10, ["folders", "runs", "events"], throw_exceptions=True)
     time.sleep(1)

     # Get online version of events
diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py
index e59867b9..2ea95157 100644
--- a/tests/unit/test_file_artifact.py
+++ b/tests/unit/test_file_artifact.py
@@ -103,7 +103,7 @@ def test_file_artifact_creation_offline(offline_cache_setup, snapshot) -> None:
     # If snapshot, check artifact definition file and a copy of the actual file exist in staging area
     assert len(list(_artifact._local_staging_file.parent.iterdir())) == 2 if snapshot else 1

-    _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10)
+    _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)
     time.sleep(1)

     # Check file(s) deleted after upload
@@ -159,11 +159,11 @@ def test_file_artifact_creation_offline_updated(offline_cache_setup, caplog, sna

     if not snapshot:
         with caplog.at_level(logging.ERROR):
-            _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10)
+            _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)
            assert "The SHA256 you specified did not match the calculated checksum." in caplog.text
         return
     else:
-        _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10)
+        _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)
         time.sleep(1)

     _online_artifact = Artifact(_id_mapping[_artifact.id])
diff --git a/tests/unit/test_file_storage.py b/tests/unit/test_file_storage.py
index 57eb0d24..1c5c9ce6 100644
--- a/tests/unit/test_file_storage.py
+++ b/tests/unit/test_file_storage.py
@@ -38,7 +38,7 @@ def test_create_file_storage_offline(offline_cache_setup) -> None:
     assert _local_data.get("is_enabled") == False
     assert _local_data.get("is_default") == False

-    _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"])
+    _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"], throw_exceptions=True)
     time.sleep(1)
     _online_storage = FileStorage(_id_mapping.get(_storage.id))
     assert _online_storage.name == _uuid
diff --git a/tests/unit/test_folder.py b/tests/unit/test_folder.py
index e234dfe6..7004551f 100644
--- a/tests/unit/test_folder.py
+++ b/tests/unit/test_folder.py
@@ -42,7 +42,7 @@ def test_folder_creation_offline(offline_cache_setup) -> None:
     assert _folder._local_staging_file.name.split(".")[0] == _folder.id
     assert _local_data.get("path", None) == _path

-    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"])
+    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True)
     time.sleep(1)

     client = Client()
@@ -96,7 +96,7 @@ def test_folder_modification_offline(offline_cache_setup) -> None:
     _folder = Folder.new(path=_path, offline=True)
     _folder.commit()

-    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"])
+    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True)
     time.sleep(1)

     client = Client()
@@ -115,7 +115,7 @@ def test_folder_modification_offline(offline_cache_setup) -> None:
     assert _local_data.get("description", None) == _description
     assert _local_data.get("tags", None) == _tags

-    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"])
+    sender(_folder._local_staging_file.parents[1], 2, 10, ["folders"], throw_exceptions=True)
     time.sleep(1)

     _folder_online.refresh()
diff --git a/tests/unit/test_grids.py b/tests/unit/test_grids.py
index a3764d96..91c5a05e 100644
--- a/tests/unit/test_grids.py
+++ b/tests/unit/test_grids.py
@@ -72,7 +72,7 @@ def test_grid_creation_offline() -> None:
     assert _local_data.get("runs", [None])[0] == [_run.id, "A"]
     npt.assert_array_equal(numpy.array(_local_data.get("grid")), _grid_def)

-    _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids"])
+    _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids"], throw_exceptions=True)
     time.sleep(1)
     # Get online version of grid
     _online_grid = Grid(_id_mapping.get(_grid.id))
@@ -184,7 +184,7 @@ def test_grid_metrics_creation_offline() -> None:
     _metrics.commit()
     _run.status = "completed"
     _run.commit()
-    _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids", "grid_metrics"])
+    _id_mapping = sender(_grid._local_staging_file.parents[1], 1, 10, ["folders", "runs", "grids", "grid_metrics"], throw_exceptions=True)
     time.sleep(1)
     # Online metrics
     assert list(GridMetrics.get(runs=[_id_mapping[_run.id]], metrics=["A"], step=_step))
diff --git a/tests/unit/test_metric_range_alert.py b/tests/unit/test_metric_range_alert.py
index 3707e0da..56347eff 100644
--- a/tests/unit/test_metric_range_alert.py
+++ b/tests/unit/test_metric_range_alert.py
@@ -62,7 +62,7 @@ def test_metric_range_alert_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == f"metrics_range_alert_{_uuid}"
     assert _local_data.get("notification") == "none"
     assert _local_data.get("alert").get("range_low") == 10
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -124,7 +124,7 @@ def test_metric_range_alert_modification_offline(offline_cache_setup) -> None:
         offline=True
     )
     _alert.commit()
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -149,7 +149,7 @@ def test_metric_range_alert_modification_offline(offline_cache_setup) -> None:
         _local_data = json.load(in_f)
     assert _local_data.get("description") == "updated!"

-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     _online_alert.refresh()
diff --git a/tests/unit/test_metric_threshold_alert.py b/tests/unit/test_metric_threshold_alert.py
index 014cd8d9..8f04c698 100644
--- a/tests/unit/test_metric_threshold_alert.py
+++ b/tests/unit/test_metric_threshold_alert.py
@@ -61,7 +61,7 @@ def test_metric_threshold_alert_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("notification") == "none"
     assert _local_data.get("alert").get("threshold") == 10

-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -123,7 +123,7 @@ def test_metric_threshold_alert_modification_offline(offline_cache_setup) -> Non
     )
     _alert.commit()

-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -149,7 +149,7 @@ def test_metric_threshold_alert_modification_offline(offline_cache_setup) -> Non
         _local_data = json.load(in_f)
     assert _local_data.get("description") == "updated!"
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     _online_alert.refresh()
diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py
index 4d2823a7..fea65482 100644
--- a/tests/unit/test_metrics.py
+++ b/tests/unit/test_metrics.py
@@ -88,7 +88,7 @@ def test_metrics_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("metrics")[0].get("step") == _step
     assert _local_data.get("metrics")[0].get("time") == _time

-    _id_mapping = sender(_metrics._local_staging_file.parents[1], 1, 10, ["folders", "runs", "metrics"])
+    _id_mapping = sender(_metrics._local_staging_file.parents[1], 1, 10, ["folders", "runs", "metrics"], throw_exceptions=True)
     time.sleep(1)

     # Get online version of metrics
diff --git a/tests/unit/test_object_artifact.py b/tests/unit/test_object_artifact.py
index 0dfb5af1..b45eef1a 100644
--- a/tests/unit/test_object_artifact.py
+++ b/tests/unit/test_object_artifact.py
@@ -63,7 +63,7 @@ def test_object_artifact_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("mime_type") == "application/vnd.simvue.numpy.v1"
     assert _local_data.get("runs") == {_run.id: "input"}

-    _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10)
+    _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)
     time.sleep(1)

     _online_artifact = Artifact(_id_mapping.get(_artifact.id))
diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py
index 366dc7c9..395b658a 100644
--- a/tests/unit/test_run.py
+++ b/tests/unit/test_run.py
@@ -41,7 +41,7 @@ def test_run_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == f"simvue_offline_run_{_uuid}"
     assert _local_data.get("folder") == _folder_name

-    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"])
+    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve run
@@ -119,7 +119,7 @@ def test_run_modification_offline(offline_cache_setup) -> None:
     assert _new_run.description == "Simvue test run"
     assert _new_run.name == "simvue_test_run"

-    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"])
+    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve run
@@ -139,7 +139,7 @@ def test_run_modification_offline(offline_cache_setup) -> None:
     _online_run.refresh()
     assert _online_run.tags == []

-    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"])
+    sender(_run._local_staging_file.parents[1], 1, 10, ["folders", "runs"], throw_exceptions=True)
     time.sleep(1)

     _online_run.refresh()
diff --git a/tests/unit/test_s3_storage.py b/tests/unit/test_s3_storage.py
index 8fc96b45..2246e98a 100644
--- a/tests/unit/test_s3_storage.py
+++ b/tests/unit/test_s3_storage.py
@@ -71,7 +71,7 @@ def test_create_s3_offline(offline_cache_setup) -> None:
     assert not _local_data.get("user", None)
     assert not _local_data.get("usage", None)

-    _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"])
+    _id_mapping = sender(_storage._local_staging_file.parents[1], 1, 10, ["storage"], throw_exceptions=True)
     _online_id = _id_mapping[_storage.id]
     time.sleep(1)
diff --git a/tests/unit/test_tag.py b/tests/unit/test_tag.py
index 4959f3f7..381234d2 100644
--- a/tests/unit/test_tag.py
+++ b/tests/unit/test_tag.py
@@ -35,7 +35,7 @@ def test_tag_creation_offline(offline_cache_setup) -> None:

     assert _local_data.get("name") == f"test_tag_{_uuid}"

-    _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"])
+    _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True)
     time.sleep(1)

     _online_id = _id_mapping.get(_tag.id)
@@ -78,7 +78,7 @@ def test_tag_modification_offline(offline_cache_setup) -> None:

     assert _local_data.get("name") == f"test_tag_{_uuid}"

-    _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"])
+    _id_mapping = sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True)

     _online_id = _id_mapping.get(_tag.id)
     _online_tag = Tag(_online_id)
@@ -101,7 +101,7 @@ def test_tag_modification_offline(offline_cache_setup) -> None:
     assert pydantic.color.parse_str(_local_data.get("colour")).r == 250 / 255
     assert _local_data.get("description") == "modified test tag"

-    sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"])
+    sender(_tag._local_staging_file.parents[1], 1, 10, ["tags"], throw_exceptions=True)
     time.sleep(1)

     # Check online version is updated
diff --git a/tests/unit/test_tenant.py b/tests/unit/test_tenant.py
index 38e3f9e3..04684467 100644
--- a/tests/unit/test_tenant.py
+++ b/tests/unit/test_tenant.py
@@ -40,7 +40,7 @@ def test_create_tenant_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == _uuid
     assert _local_data.get("is_enabled") == True

-    _id_mapping = sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"])
+    _id_mapping = sender(_new_tenant._local_staging_file.parents[1], 1, 10, ["tenants"], throw_exceptions=True)
     time.sleep(1)
     _online_user = Tenant(_id_mapping.get(_new_tenant.id))
     assert _online_user.name == _uuid
diff --git a/tests/unit/test_user.py b/tests/unit/test_user.py
index 5a23349a..a53f3cfd 100644
--- a/tests/unit/test_user.py
+++ b/tests/unit/test_user.py
@@ -62,7 +62,7 @@ def test_create_user_offline(offline_cache_setup) -> None:
     assert _local_data.get("fullname") == "Joe Bloggs"
     assert _local_data.get("email") == "jbloggs@simvue.io"

-    _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"])
+    _id_mapping = sender(_user._local_staging_file.parents[1], 1, 10, ["users"], throw_exceptions=True)
     time.sleep(1)
     _online_user = User(_id_mapping.get(_user.id))
     assert _online_user.username == "jbloggs"
diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py
index 7984ecce..f1f1acea 100644
--- a/tests/unit/test_user_alert.py
+++ b/tests/unit/test_user_alert.py
@@ -46,7 +46,7 @@ def test_user_alert_creation_offline(offline_cache_setup) -> None:
     assert _local_data.get("name") == f"users_alert_{_uuid}"
     assert _local_data.get("notification") == "none"

-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     _online_id = _alert._local_staging_file.parents[1].joinpath("server_ids", f"{_alert._local_staging_file.name.split('.')[0]}.txt").read_text()
@@ -94,7 +94,7 @@ def test_user_alert_modification_offline(offline_cache_setup) -> None:
     )
     _alert.commit()

-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online ID and retrieve alert
@@ -117,7 +117,7 @@ def test_user_alert_modification_offline(offline_cache_setup) -> None:
     with _alert._local_staging_file.open() as in_f:
         _local_data = json.load(in_f)
     assert _local_data.get("description") == "updated!"
-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     _online_alert.refresh()
@@ -191,7 +191,7 @@ def test_user_alert_status_offline(offline_cache_setup) -> None:
     _run.alerts = [_alert.id]
     _run.commit()

-    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"])
+    _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"], throw_exceptions=True)
     time.sleep(1)

     # Get online alert, check status is not set
@@ -206,7 +206,7 @@ def test_user_alert_status_offline(offline_cache_setup) -> None:
     _online_alert.refresh()
     assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id))

-    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"])
+    sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"], throw_exceptions=True)
     time.sleep(1)

     # Check online status has been updated
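Editorial note, not part of the patch series: PATCH 5/6 below replaces the caplog-based assertion with pytest.raises, the natural counterpart of throw_exceptions=True. The match argument is a regular expression applied with re.search to the string form of the raised exception, so the full literal message used in the patch matches itself (its "." characters match any character, which is harmless here). A minimal sketch with a hypothetical helper:

import pytest

def _checksum_failure():
    raise RuntimeError("The SHA256 you specified did not match the calculated checksum.")

def test_checksum_failure_message():
    with pytest.raises(RuntimeError, match="SHA256 you specified did not match"):
        _checksum_failure()
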
From 2fee8fd9f4ba8ae3704fec912d761792673ddc28 Mon Sep 17 00:00:00 2001
From: Matt Field
Date: Mon, 13 Oct 2025 11:19:13 +0100
Subject: [PATCH 5/6] Fix artifact test

---
 tests/unit/test_file_artifact.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/tests/unit/test_file_artifact.py b/tests/unit/test_file_artifact.py
index 2ea95157..ad736c3a 100644
--- a/tests/unit/test_file_artifact.py
+++ b/tests/unit/test_file_artifact.py
@@ -158,9 +158,8 @@ def test_file_artifact_creation_offline_updated(offline_cache_setup, caplog, sna
         out_f.write("File changed!")

     if not snapshot:
-        with caplog.at_level(logging.ERROR):
+        with pytest.raises(RuntimeError, match="The SHA256 you specified did not match the calculated checksum."):
             _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)
-            assert "The SHA256 you specified did not match the calculated checksum." in caplog.text
         return
     else:
         _id_mapping = sender(pathlib.Path(offline_cache_setup.name), 1, 10, throw_exceptions=True)

From d1aa7ec333fab7a90b94c6a96398ad3561fc5952 Mon Sep 17 00:00:00 2001
From: Matt Field
Date: Mon, 13 Oct 2025 11:21:19 +0100
Subject: [PATCH 6/6] Fix offline grids validation

---
 simvue/models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/simvue/models.py b/simvue/models.py
index c65a75c0..f294453c 100644
--- a/simvue/models.py
+++ b/simvue/models.py
@@ -96,7 +96,7 @@ class GridMetricSet(pydantic.BaseModel):
     time: pydantic.NonNegativeFloat | pydantic.NonNegativeInt
     timestamp: typing.Annotated[str | None, pydantic.BeforeValidator(simvue_timestamp)]
     step: pydantic.NonNegativeInt
-    array: list[float] | numpy.ndarray
+    array: list[float] | list[list[float]] | numpy.ndarray
     grid: str
     metric: str
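Validation sketch (editorial addition, not part of the patch series): what PATCH 6/6 is intended to permit. A grid metric whose array is a nested list now passes model validation, where previously only a flat list of floats or a numpy array was accepted. All field values below are placeholders.

from simvue.models import GridMetricSet

GridMetricSet(
    time=0.1,
    timestamp=None,  # the before-validator substitutes the current UTC time
    step=0,
    array=[[1.0, 2.0], [3.0, 4.0]],  # nested rows, rejected before this patch
    grid="grid-identifier",
    metric="A",
)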