 import requests
 import psutil
 from simvue.config.user import SimvueConfiguration
-
 import simvue.api.objects
 from simvue.api.objects.artifact.base import ArtifactBase
 from simvue.eco.emissions_monitor import CO2Monitor
 _logger = logging.getLogger(__name__)


+def _log_upload_failed(file_path: pydantic.FilePath) -> None:
+    """Record that an object failed to upload in the object offline cache file.
+
+    Parameters
+    ----------
+    file_path : pydantic.FilePath
+        The path to the offline cache file for the object
+    """
+    with file_path.open("r") as file:
+        _data = json.load(file)
+    _data["upload_failed"] = True
+    with file_path.open("w") as file:
+        json.dump(_data, file)
+
+
 def upload_cached_file(
     cache_dir: pydantic.DirectoryPath,
     obj_type: str,
     file_path: pydantic.FilePath,
     id_mapping: dict[str, str],
+    retry_failed_uploads: bool,
     lock: threading.Lock,
-):
+) -> None:
     """Upload data stored in a cached file to the Simvue server.

     Parameters
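The new `_log_upload_failed` helper simply rewrites an object's cached JSON in place with an extra `upload_failed` key. A rough illustration of the effect follows; the file name and fields are hypothetical, not taken from this diff:

    import json
    import pathlib
    import tempfile

    # Hypothetical offline cache entry "<offline id>.json" as the sender stores it.
    cache_file = pathlib.Path(tempfile.mkdtemp()) / "abc123.json"
    cache_file.write_text(json.dumps({"obj_type": "Run", "name": "example-run"}))

    # _log_upload_failed(cache_file) performs the same read-modify-write as below:
    data = json.loads(cache_file.read_text())
    data["upload_failed"] = True
    cache_file.write_text(json.dumps(data))

    print(cache_file.read_text())
    # {"obj_type": "Run", "name": "example-run", "upload_failed": true}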
@@ -62,41 +77,53 @@ def upload_cached_file(
     _current_id = file_path.name.split(".")[0]
     _data = json.load(file_path.open())
     _exact_type: str = _data.pop("obj_type")
+
+    if _data.pop("upload_failed", False) and not retry_failed_uploads:
+        return
+
     try:
         _instance_class = getattr(simvue.api.objects, _exact_type)
-    except AttributeError as e:
-        raise RuntimeError(f"Attempt to initialise unknown type '{_exact_type}'") from e
+    except AttributeError:
+        _logger.error(f"Attempt to initialise unknown type '{_exact_type}'")
+        _log_upload_failed(file_path)
+        return

     # If it is an ObjectArtifact, need to load the object as bytes from a different file
     if issubclass(_instance_class, simvue.api.objects.ObjectArtifact):
         with open(file_path.parent.joinpath(f"{_current_id}.object"), "rb") as file:
             _data["serialized"] = file.read()
+    try:
+        # We want to reconnect if there is an online ID stored for this file
+        if _online_id := id_mapping.get(_current_id):
+            obj_for_upload = _instance_class(
+                identifier=_online_id, _read_only=False, **_data
+            )
+        else:
+            obj_for_upload = _instance_class.new(**_data)

-    # We want to reconnect if there is an online ID stored for this file
-    if _online_id := id_mapping.get(_current_id):
-        obj_for_upload = _instance_class(
-            identifier=_online_id, _read_only=False, **_data
-        )
-    else:
-        obj_for_upload = _instance_class.new(**_data)
-
-    with lock:
-        obj_for_upload.on_reconnect(id_mapping)
+        with lock:
+            obj_for_upload.on_reconnect(id_mapping)

-    try:
         if not issubclass(_instance_class, ArtifactBase):
             obj_for_upload.commit()
         _new_id = obj_for_upload.id
-    except RuntimeError as error:
+
+    except Exception as error:
         if "status 409" in error.args[0]:
             return
-        raise error
-    if not _new_id:
-        raise RuntimeError(
-            f"Object of type '{obj_for_upload.__class__.__name__}' has no identifier"
+
+        _logger.error(
+            f"Error while committing '{_instance_class.__name__}': {error.args[0]}"
         )
+        _log_upload_failed(file_path)
+        return
+    if not _new_id:
+        _logger.error(f"Object of type '{_instance_class.__name__}' has no identifier")
+        _log_upload_failed(file_path)
+        return
+
     _logger.info(
-        f"{'Updated' if id_mapping.get(_current_id) else 'Created'} {obj_for_upload.__class__.__name__} '{_new_id}'"
+        f"{'Updated' if id_mapping.get(_current_id) else 'Created'} {_instance_class.__name__} '{_new_id}'"
     )

     file_path.unlink(missing_ok=True)
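Taken together, the rewritten body follows one failure pattern: a "status 409" conflict means the object already exists on the server and is silently ignored, while any other error is logged, the cache file is flagged via `_log_upload_failed`, and the function returns instead of raising, so a single bad file no longer aborts the whole send. A minimal, self-contained sketch of that control flow with the commit call simulated; the helper name is illustrative, and only the 409 string check and the flag key mirror the diff:

    import json
    import logging
    import pathlib
    from typing import Callable

    _logger = logging.getLogger(__name__)

    def _commit_or_flag(commit: Callable[[], str], cache_file: pathlib.Path) -> str | None:
        """Return the new server ID, or flag the cache file and return None on failure."""
        try:
            return commit()
        except Exception as error:
            if "status 409" in error.args[0]:
                return None  # already exists server-side; treat as success
            _logger.error(f"Error while committing: {error.args[0]}")
            data = json.loads(cache_file.read_text())
            data["upload_failed"] = True  # same key _log_upload_failed writes
            cache_file.write_text(json.dumps(data))
            return None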
@@ -155,6 +182,7 @@ def sender(
     max_workers: int = 5,
     threading_threshold: int = 10,
     objects_to_upload: list[str] = UPLOAD_ORDER,
+    retry_failed_uploads: bool = False,
 ) -> dict[str, str]:
     """Send data from a local cache directory to the Simvue server.

@@ -168,6 +196,8 @@ def sender(
         The number of cached files above which threading will be used
     objects_to_upload : list[str]
         Types of objects to upload, by default uploads all types of objects present in cache
+    retry_failed_uploads : bool, optional
+        Whether to retry sending objects which previously failed, by default False

     Returns
     -------
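With the flag exposed on `sender()`, callers can opt back in to objects that were previously flagged as failed. A hedged usage sketch; the import path and the defaults of parameters not shown in this hunk are assumptions:

    from simvue.sender import sender  # assumed module path

    # Default behaviour: cached objects flagged with "upload_failed" are skipped.
    id_mapping = sender()

    # Opt in to re-sending objects whose previous upload failed.
    id_mapping = sender(retry_failed_uploads=True)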
@@ -203,7 +233,14 @@ def sender(
         _offline_files = _all_offline_files[_obj_type]
         if len(_offline_files) < threading_threshold:
             for file_path in _offline_files:
-                upload_cached_file(cache_dir, _obj_type, file_path, _id_mapping, _lock)
+                upload_cached_file(
+                    cache_dir=cache_dir,
+                    obj_type=_obj_type,
+                    file_path=file_path,
+                    id_mapping=_id_mapping,
+                    retry_failed_uploads=retry_failed_uploads,
+                    lock=_lock,
+                )
         else:
             with ThreadPoolExecutor(
                 max_workers=max_workers, thread_name_prefix="sender_session_upload"
@@ -214,6 +251,7 @@ def sender(
                         obj_type=_obj_type,
                         file_path=file_path,
                         id_mapping=_id_mapping,
+                        retry_failed_uploads=retry_failed_uploads,
                         lock=_lock,
                     ),
                     _offline_files,
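Both call sites now pass the keyword set through explicitly; in the threaded branch the shared arguments are bound inside the mapped callable while `executor.map` supplies each file path. The same fan-out pattern in isolation, with a stand-in upload function; the names here are illustrative, not from the diff:

    import threading
    from concurrent.futures import ThreadPoolExecutor

    def upload(file_path: str, *, retry_failed_uploads: bool, lock: threading.Lock) -> None:
        # Stand-in for upload_cached_file: just report what would be uploaded.
        with lock:
            print(f"uploading {file_path} (retry={retry_failed_uploads})")

    offline_files = ["a.json", "b.json", "c.json"]
    lock = threading.Lock()

    with ThreadPoolExecutor(
        max_workers=5, thread_name_prefix="sender_session_upload"
    ) as executor:
        # executor.map supplies each file path; the lambda binds the shared kwargs.
        list(executor.map(
            lambda file_path: upload(
                file_path,
                retry_failed_uploads=False,
                lock=lock,
            ),
            offline_files,
        ))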