diff --git a/src/murfey/client/analyser.py b/src/murfey/client/analyser.py index eb7e788b2..e43523dc5 100644 --- a/src/murfey/client/analyser.py +++ b/src/murfey/client/analyser.py @@ -19,6 +19,7 @@ from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.spa_metadata import SPAMetadataContext from murfey.client.contexts.tomo import TomographyContext +from murfey.client.contexts.tomo_metadata import TomographyMetadataContext from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.client.rsync import RSyncerUpdate, TransferResult from murfey.util.client import Observer, get_machine_config_client @@ -226,6 +227,13 @@ def _analyse(self): and not self._context ): self._context = SPAMetadataContext("epu", self._basepath) + elif ( + "Batch" in transferred_file.parts + or "SearchMaps" in transferred_file.parts + or transferred_file.name == "Session.dm" + # parenthesised so the unset-context check covers all three patterns + ) and not self._context: + self._context = TomographyMetadataContext("tomo", self._basepath) self.post_transfer(transferred_file) else: dc_metadata = {} @@ -369,9 +377,10 @@ def _analyse(self): elif isinstance( self._context, ( - TomographyContext, SPAModularContext, SPAMetadataContext, + TomographyContext, + TomographyMetadataContext, ), ): context = str(self._context).split(" ")[0].split(".")[-1] diff --git a/src/murfey/client/contexts/tomo_metadata.py b/src/murfey/client/contexts/tomo_metadata.py new file mode 100644 index 000000000..a93a7b644 --- /dev/null +++ b/src/murfey/client/contexts/tomo_metadata.py @@ -0,0 +1,324 @@ +import logging +from pathlib import Path +from typing import Optional + +import requests +import xmltodict + +from murfey.client.context import Context +from murfey.client.contexts.spa import _file_transferred_to, _get_source +from murfey.client.contexts.spa_metadata import _atlas_destination +from murfey.client.instance_environment import MurfeyInstanceEnvironment, SampleInfo +from murfey.util.api import url_path_for +from 
murfey.util.client import authorised_requests, capture_post + +logger = logging.getLogger("murfey.client.contexts.tomo_metadata") + +requests.get, requests.post, requests.put, requests.delete = authorised_requests() + + +def ensure_dcg_exists(transferred_file: Path, environment: MurfeyInstanceEnvironment): + # Make sure we have a data collection group + source = _get_source(transferred_file, environment=environment) + if not source: + return None + dcg_tag = str(source).replace(f"/{environment.visit}", "") + url = f"{str(environment.url.geturl())}{url_path_for('workflow.router', 'register_dc_group', visit_name=environment.visit, session_id=environment.murfey_session)}" + dcg_data = { + "experiment_type": "tomo", + "experiment_type_id": 36, + "tag": dcg_tag, + } + capture_post(url, json=dcg_data) + return dcg_tag + + +class TomographyMetadataContext(Context): + def __init__(self, acquisition_software: str, basepath: Path): + super().__init__("Tomography_metadata", acquisition_software) + self._basepath = basepath + + def post_transfer( + self, + transferred_file: Path, + environment: Optional[MurfeyInstanceEnvironment] = None, + **kwargs, + ): + super().post_transfer( + transferred_file=transferred_file, + environment=environment, + **kwargs, + ) + + if transferred_file.name == "Session.dm" and environment: + logger.info("Tomography session metadata found") + with open(transferred_file, "r") as session_xml: + session_data = xmltodict.parse(session_xml.read()) + + windows_path = session_data["TomographySession"]["AtlasId"] + logger.info(f"Windows path to atlas metadata found: {windows_path}") + visit_index = windows_path.split("\\").index(environment.visit) + partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) + logger.info("Partial Linux path successfully constructed from Windows path") + + source = _get_source(transferred_file, environment) + if not source: + logger.warning( + f"Source could not be identified for {str(transferred_file)}" 
+ ) + return + + source_visit_dir = source.parent + + logger.info( + f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" + ) + atlas_xml_path = list( + (source_visit_dir / partial_path).parent.glob("Atlas_*.xml") + )[0] + logger.info(f"Atlas XML path {str(atlas_xml_path)} found") + with open(atlas_xml_path, "rb") as atlas_xml: + atlas_xml_data = xmltodict.parse(atlas_xml) + atlas_pixel_size = float( + atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ + "numericValue" + ] + ) + + for p in partial_path.split("/"): + if p.startswith("Sample"): + sample = int(p.replace("Sample", "")) + break + else: + logger.warning(f"Sample could not be identified for {transferred_file}") + return + environment.samples[source] = SampleInfo( + atlas=Path(partial_path), sample=sample + ) + url = f"{str(environment.url.geturl())}{url_path_for('workflow.router', 'register_dc_group', visit_name=environment.visit, session_id=environment.murfey_session)}" + dcg_tag = "/".join( + p for p in transferred_file.parent.parts if p != environment.visit + ).replace("//", "/") + dcg_data = { + "experiment_type": "tomo", + "experiment_type_id": 36, + "tag": dcg_tag, + "atlas": str( + _atlas_destination(environment, source, transferred_file) + / environment.samples[source].atlas.parent + / atlas_xml_path.with_suffix(".jpg").name + ), + "sample": environment.samples[source].sample, + "atlas_pixel_size": atlas_pixel_size, + } + capture_post(url, json=dcg_data) + + elif transferred_file.name == "SearchMap.xml" and environment: + logger.info("Tomography session search map xml found") + dcg_tag = ensure_dcg_exists(transferred_file, environment) + with open(transferred_file, "r") as sm_xml: + sm_data = xmltodict.parse(sm_xml.read()) + + # This bit gets SearchMap location on Atlas + sm_pixel_size = float( + sm_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ + "numericValue" + ] + ) + stage_position = 
sm_data["MicroscopeImage"]["microscopeData"]["stage"][ + "Position" + ] + sm_binning = float( + sm_data["MicroscopeImage"]["microscopeData"]["acquisition"]["camera"][ + "Binning" + ]["a:x"] + ) + + # Get the stage transformation + sm_transformations = sm_data["MicroscopeImage"]["CustomData"][ + "a:KeyValueOfstringanyType" + ] + stage_matrix: dict[str, float] = {} + image_matrix: dict[str, float] = {} + for key_val in sm_transformations: + if key_val["a:Key"] == "ReferenceCorrectionForStage": + stage_matrix = { + "m11": float(key_val["a:Value"]["b:_m11"]), + "m12": float(key_val["a:Value"]["b:_m12"]), + "m21": float(key_val["a:Value"]["b:_m21"]), + "m22": float(key_val["a:Value"]["b:_m22"]), + } + elif key_val["a:Key"] == "ReferenceCorrectionForImageShift": + image_matrix = { + "m11": float(key_val["a:Value"]["b:_m11"]), + "m12": float(key_val["a:Value"]["b:_m12"]), + "m21": float(key_val["a:Value"]["b:_m21"]), + "m22": float(key_val["a:Value"]["b:_m22"]), + } + if not stage_matrix or not image_matrix: + logger.error( + f"No stage or image shift matrix found for {transferred_file}" + ) + + ref_matrix = { + "m11": float( + sm_data["MicroscopeImage"]["ReferenceTransformation"]["matrix"][ + "a:_m11" + ] + ), + "m12": float( + sm_data["MicroscopeImage"]["ReferenceTransformation"]["matrix"][ + "a:_m12" + ] + ), + "m21": float( + sm_data["MicroscopeImage"]["ReferenceTransformation"]["matrix"][ + "a:_m21" + ] + ), + "m22": float( + sm_data["MicroscopeImage"]["ReferenceTransformation"]["matrix"][ + "a:_m22" + ] + ), + } + + source = _get_source(transferred_file, environment=environment) + image_path = ( + _file_transferred_to( + environment, source, transferred_file.parent / "SearchMap.jpg" + ) + if source + else "" + ) + + sm_url = f"{str(environment.url.geturl())}{url_path_for('session_control.tomo_router', 'register_search_map', session_id=environment.murfey_session, sm_name=transferred_file.parent.name)}" + capture_post( + sm_url, + json={ + "tag": dcg_tag, + 
"x_stage_position": float(stage_position["X"]), + "y_stage_position": float(stage_position["Y"]), + "pixel_size": sm_pixel_size, + "image": str(image_path), + "binning": sm_binning, + "reference_matrix": ref_matrix, + "stage_correction": stage_matrix, + "image_shift_correction": image_matrix, + }, + ) + + elif transferred_file.name == "SearchMap.dm" and environment: + logger.info("Tomography session search map dm found") + dcg_tag = ensure_dcg_exists(transferred_file, environment) + with open(transferred_file, "r") as sm_xml: + sm_data = xmltodict.parse(sm_xml.read()) + + # This bit gets SearchMap size + try: + sm_width = int(sm_data["TileSetXml"]["ImageSize"]["a:width"]) + sm_height = int(sm_data["TileSetXml"]["ImageSize"]["a:height"]) + except KeyError: + logger.warning(f"Unable to find size for SearchMap {transferred_file}") + readout_width = int( + sm_data["TileSetXml"]["AcquisitionSettings"]["a:camera"][ + "a:ReadoutArea" + ]["b:width"] + ) + readout_height = int( + sm_data["TileSetXml"]["AcquisitionSettings"]["a:camera"][ + "a:ReadoutArea" + ]["b:height"] + ) + sm_width = int( + 8005 * readout_width / max(readout_height, readout_width) + ) + sm_height = int( + 8005 * readout_height / max(readout_height, readout_width) + ) + logger.warning( + f"Inserting incorrect width {sm_width}, height {sm_height} for SearchMap display" + ) + + sm_url = f"{str(environment.url.geturl())}{url_path_for('session_control.tomo_router', 'register_search_map', session_id=environment.murfey_session, sm_name=transferred_file.parent.name)}" + capture_post( + sm_url, + json={ + "tag": dcg_tag, + "height": sm_height, + "width": sm_width, + }, + ) + + elif transferred_file.name == "BatchPositionsList.xml" and environment: + logger.info("Tomography session batch positions list found") + dcg_tag = ensure_dcg_exists(transferred_file, environment) + with open(transferred_file) as xml: + for_parsing = xml.read() + batch_xml = xmltodict.parse(for_parsing) + + batch_positions_list = 
batch_xml["BatchPositionsList"]["BatchPositions"][ + "BatchPositionParameters" + ] + if isinstance(batch_positions_list, dict): + # Case of a single batch + batch_positions_list = [batch_positions_list] + + for batch_position in batch_positions_list: + batch_name = batch_position["Name"] + search_map_name = batch_position["PositionOnTileSet"]["TileSetName"] + batch_stage_location_x = float( + batch_position["PositionOnTileSet"]["StagePositionX"] + ) + batch_stage_location_y = float( + batch_position["PositionOnTileSet"]["StagePositionY"] + ) + + # Always need search map before batch position + sm_url = f"{str(environment.url.geturl())}{url_path_for('session_control.tomo_router', 'register_search_map', session_id=environment.murfey_session, sm_name=search_map_name)}" + capture_post( + sm_url, + json={ + "tag": dcg_tag, + }, + ) + + # Then register batch position + bp_url = f"{str(environment.url.geturl())}{url_path_for('session_control.tomo_router', 'register_batch_position', session_id=environment.murfey_session, batch_name=batch_name)}" + capture_post( + bp_url, + json={ + "tag": dcg_tag, + "x_stage_position": batch_stage_location_x, + "y_stage_position": batch_stage_location_y, + "x_beamshift": 0, + "y_beamshift": 0, + "search_map_name": search_map_name, + }, + ) + + # Beamshifts + if batch_position.get("AdditionalExposureTemplateAreas"): + beamshifts = batch_position["AdditionalExposureTemplateAreas"][ + "ExposureTemplateAreaParameters" + ] + if type(beamshifts) is dict: + beamshifts = [beamshifts] + for beamshift in beamshifts: + beamshift_name = beamshift["Name"] + beamshift_position_x = float(beamshift["PositionX"]) + beamshift_position_y = float(beamshift["PositionY"]) + + # Registration of beamshifted position + bp_url = f"{str(environment.url.geturl())}{url_path_for('session_control.tomo_router', 'register_batch_position', session_id=environment.murfey_session, batch_name=beamshift_name)}" + capture_post( + bp_url, + json={ + "tag": dcg_tag, + 
"x_stage_position": batch_stage_location_x, + "y_stage_position": batch_stage_location_y, + "x_beamshift": beamshift_position_x, + "y_beamshift": beamshift_position_y, + "search_map_name": search_map_name, + }, + ) diff --git a/src/murfey/server/api/session_control.py b/src/murfey/server/api/session_control.py index 8ed27901c..45b282eaf 100644 --- a/src/murfey/server/api/session_control.py +++ b/src/murfey/server/api/session_control.py @@ -46,10 +46,12 @@ Session, ) from murfey.util.models import ( + BatchPositionParameters, ClientInfo, FoilHoleParameters, GridSquareParameters, RsyncerInfo, + SearchMapParameters, Visit, ) from murfey.workflows.spa.flush_spa_preprocess import ( @@ -58,6 +60,10 @@ from murfey.workflows.spa.flush_spa_preprocess import ( register_grid_square as _register_grid_square, ) +from murfey.workflows.tomo.tomo_metadata import ( + register_batch_position_in_database, + register_search_map_in_database, +) logger = getLogger("murfey.server.api.session_control") @@ -364,6 +370,33 @@ def register_foil_hole( return _register_foil_hole(session_id, gs_name, foil_hole_params, db) +tomo_router = APIRouter( + prefix="/session_control/tomo", + dependencies=[Depends(validate_instrument_token)], + tags=["Session Control: CryoET"], +) + + +@tomo_router.post("/sessions/{session_id}/search_map/{sm_name}") +def register_search_map( + session_id: MurfeySessionID, + sm_name: str, + search_map_params: SearchMapParameters, + db=murfey_db, +): + return register_search_map_in_database(session_id, sm_name, search_map_params, db) + + +@tomo_router.post("/sessions/{session_id}/batch_position/{batch_name}") +def register_batch_position( + session_id: MurfeySessionID, + batch_name: str, + batch_params: BatchPositionParameters, + db=murfey_db, +): + return register_batch_position_in_database(session_id, batch_name, batch_params, db) + + correlative_router = APIRouter( prefix="/session_control/correlative", dependencies=[Depends(validate_instrument_token)], diff --git 
a/src/murfey/server/api/workflow.py b/src/murfey/server/api/workflow.py index d1714cc51..af9c314f0 100644 --- a/src/murfey/server/api/workflow.py +++ b/src/murfey/server/api/workflow.py @@ -50,6 +50,7 @@ Movie, PreprocessStash, ProcessingJob, + SearchMap, Session, SessionProcessingParameters, SPAFeedbackParameters, @@ -57,13 +58,18 @@ Tilt, TiltSeries, ) -from murfey.util.models import ProcessingParametersSPA, ProcessingParametersTomo +from murfey.util.models import ( + ProcessingParametersSPA, + ProcessingParametersTomo, + SearchMapParameters, +) from murfey.util.processing_params import ( cryolo_model_path, default_spa_parameters, motion_corrected_mrc, ) from murfey.util.tomo import midpoint +from murfey.workflows.tomo.tomo_metadata import register_search_map_in_database logger = getLogger("murfey.server.api.workflow") @@ -132,6 +138,18 @@ def register_dc_group( dcg_murfey[0].atlas_id = atlas_id_response["return_value"] db.add(dcg_murfey[0]) db.commit() + + search_maps = db.exec( + select(SearchMap) + .where(SearchMap.session_id == session_id) + .where(SearchMap.tag == dcg_params.tag) + ).all() + search_map_params = SearchMapParameters(tag=dcg_params.tag) + for sm in search_maps: + register_search_map_in_database( + session_id, sm.name, search_map_params, db, close_db=False + ) + db.close() else: dcg_parameters = { "start_time": str(datetime.now()), @@ -813,6 +831,9 @@ def register_completed_tilt_series( "pixel_size": preproc_params.pixel_size, "manual_tilt_offset": -tilt_offset, "node_creator_queue": machine_config.node_creator_queue, + "search_map_id": ts.search_map_id, + "x_location": ts.x_location, + "y_location": ts.y_location, }, } if _transport_object: diff --git a/src/murfey/server/feedback.py b/src/murfey/server/feedback.py index 80108bd87..24b67a2e2 100644 --- a/src/murfey/server/feedback.py +++ b/src/murfey/server/feedback.py @@ -1937,6 +1937,9 @@ def feedback_callback(header: dict, message: dict) -> None: "pixel_size": preproc_params.pixel_size, 
"manual_tilt_offset": -tilt_offset, "node_creator_queue": machine_config.node_creator_queue, + "search_map_id": relevant_tilt_series.search_map_id, + "x_location": relevant_tilt_series.x_location, + "y_location": relevant_tilt_series.y_location, }, } if murfey.server._transport_object: diff --git a/src/murfey/server/ispyb.py b/src/murfey/server/ispyb.py index 24f8094ce..439ce7795 100644 --- a/src/murfey/server/ispyb.py +++ b/src/murfey/server/ispyb.py @@ -32,7 +32,11 @@ from murfey.util import sanitise from murfey.util.config import get_security_config -from murfey.util.models import FoilHoleParameters, GridSquareParameters +from murfey.util.models import ( + FoilHoleParameters, + GridSquareParameters, + SearchMapParameters, +) log = logging.getLogger("murfey.server.ispyb") security_config = get_security_config() @@ -388,6 +392,90 @@ def do_update_foil_hole( ) return {"success": False, "return_value": None} + def do_insert_search_map( + self, + atlas_id: int, + search_map_parameters: SearchMapParameters, + ): + if ( + search_map_parameters.pixel_size + and search_map_parameters.height + and search_map_parameters.height_on_atlas + ): + search_map_parameters.pixel_size *= ( + search_map_parameters.height / search_map_parameters.height_on_atlas + ) + record = GridSquare( + atlasId=atlas_id, + gridSquareImage=search_map_parameters.image, + pixelLocationX=search_map_parameters.x_location, + pixelLocationY=search_map_parameters.y_location, + height=search_map_parameters.height_on_atlas, + width=search_map_parameters.width_on_atlas, + stageLocationX=search_map_parameters.x_stage_position, + stageLocationY=search_map_parameters.y_stage_position, + pixelSize=search_map_parameters.pixel_size, + ) + try: + with ISPyBSession() as db: + db.add(record) + db.commit() + log.info(f"Created SearchMap (GridSquare) {record.gridSquareId}") + return {"success": True, "return_value": record.gridSquareId} + except ispyb.ISPyBException as e: + log.error( + "Inserting SearchMap (GridSquare) 
entry caused exception '%s'.", + e, + exc_info=True, + ) + return {"success": False, "return_value": None} + + def do_update_search_map( + self, search_map_id, search_map_parameters: SearchMapParameters + ): + try: + with ISPyBSession() as db: + grid_square = ( + db.query(GridSquare) + .filter(GridSquare.gridSquareId == search_map_id) + .one() + ) + if ( + search_map_parameters.pixel_size + and search_map_parameters.height + and search_map_parameters.height_on_atlas + ): + search_map_parameters.pixel_size *= ( + search_map_parameters.height + / search_map_parameters.height_on_atlas + ) + if search_map_parameters.image: + grid_square.gridSquareImage = search_map_parameters.image + if search_map_parameters.x_location: + grid_square.pixelLocationX = search_map_parameters.x_location + if search_map_parameters.y_location: + grid_square.pixelLocationY = search_map_parameters.y_location + if search_map_parameters.height_on_atlas: + grid_square.height = search_map_parameters.height_on_atlas + if search_map_parameters.width_on_atlas: + grid_square.width = search_map_parameters.width_on_atlas + if search_map_parameters.x_stage_position: + grid_square.stageLocationX = search_map_parameters.x_stage_position + if search_map_parameters.y_stage_position: + grid_square.stageLocationY = search_map_parameters.y_stage_position + if search_map_parameters.pixel_size: + grid_square.pixelSize = search_map_parameters.pixel_size + db.add(grid_square) + db.commit() + return {"success": True, "return_value": grid_square.gridSquareId} + except ispyb.ISPyBException as e: + log.error( + "Updating SearchMap (GridSquare) entry caused exception '%s'.", + e, + exc_info=True, + ) + return {"success": False, "return_value": None} + def send(self, queue: str, message: dict, new_connection: bool = False): if self.transport: if not self.transport.is_connected(): diff --git a/src/murfey/server/main.py b/src/murfey/server/main.py index b3fd3e3c1..94fc5824b 100644 --- a/src/murfey/server/main.py +++ 
b/src/murfey/server/main.py @@ -81,6 +81,7 @@ class Settings(BaseSettings): app.include_router(murfey.server.api.session_control.router) app.include_router(murfey.server.api.session_control.spa_router) +app.include_router(murfey.server.api.session_control.tomo_router) app.include_router(murfey.server.api.session_info.router) app.include_router(murfey.server.api.session_info.correlative_router) diff --git a/src/murfey/util/db.py b/src/murfey/util/db.py index 8d43b98c6..8dee78fcf 100644 --- a/src/murfey/util/db.py +++ b/src/murfey/util/db.py @@ -96,6 +96,9 @@ class Session(SQLModel, table=True): # type: ignore foil_holes: List["FoilHole"] = Relationship( back_populates="session", sa_relationship_kwargs={"cascade": "delete"} ) + search_maps: List["SearchMap"] = Relationship( + back_populates="session", sa_relationship_kwargs={"cascade": "delete"} + ) rsync_instances: List[RsyncInstance] = Relationship( back_populates="session", sa_relationship_kwargs={"cascade": "delete"} ) @@ -346,15 +349,23 @@ class SessionProcessingParameters(SQLModel, table=True): # type: ignore class TiltSeries(SQLModel, table=True): # type: ignore id: int = Field(primary_key=True) + ispyb_id: Optional[int] = None tag: str rsync_source: str session_id: int = Field(foreign_key="session.id") + search_map_id: Optional[int] = Field( + foreign_key="searchmap.id", + default=None, + ) tilt_series_length: int = -1 processing_requested: bool = False + x_location: Optional[float] = None + y_location: Optional[float] = None session: Optional[Session] = Relationship(back_populates="tilt_series") tilts: List["Tilt"] = Relationship( back_populates="tilt_series", sa_relationship_kwargs={"cascade": "delete"} ) + search_map: Optional["SearchMap"] = Relationship(back_populates="tilt_series") class Tilt(SQLModel, table=True): # type: ignore @@ -601,6 +612,38 @@ class FoilHole(SQLModel, table=True): # type: ignore ) +class SearchMap(SQLModel, table=True): # type: ignore + id: Optional[int] = Field(primary_key=True, 
default=None) + session_id: int = Field(foreign_key="session.id") + name: str + tag: str + x_location: Optional[float] = None + y_location: Optional[float] = None + x_stage_position: Optional[float] = None + y_stage_position: Optional[float] = None + pixel_size: Optional[float] = None + image: str = "" + binning: Optional[float] = None + reference_matrix_m11: Optional[float] = None + reference_matrix_m12: Optional[float] = None + reference_matrix_m21: Optional[float] = None + reference_matrix_m22: Optional[float] = None + stage_correction_m11: Optional[float] = None + stage_correction_m12: Optional[float] = None + stage_correction_m21: Optional[float] = None + stage_correction_m22: Optional[float] = None + image_shift_correction_m11: Optional[float] = None + image_shift_correction_m12: Optional[float] = None + image_shift_correction_m21: Optional[float] = None + image_shift_correction_m22: Optional[float] = None + width: Optional[int] = None + height: Optional[int] = None + session: Optional[Session] = Relationship(back_populates="search_maps") + tilt_series: List["TiltSeries"] = Relationship( + back_populates="search_map", sa_relationship_kwargs={"cascade": "delete"} + ) + + class Movie(SQLModel, table=True): # type: ignore murfey_id: int = Field(primary_key=True, foreign_key="murfeyledger.id") foil_hole_id: int = Field(foreign_key="foilhole.id", nullable=True, default=None) diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index de6f3bd43..db55d83ae 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -146,6 +146,33 @@ class FoilHoleParameters(BaseModel): diameter: Optional[float] = None +class SearchMapParameters(BaseModel): + tag: str + x_location: Optional[float] = None + y_location: Optional[float] = None + x_stage_position: Optional[float] = None + y_stage_position: Optional[float] = None + pixel_size: Optional[float] = None + image: Optional[str] = None + binning: Optional[float] = None + reference_matrix: Dict[str, 
float] = {} + stage_correction: Dict[str, float] = {} + image_shift_correction: Dict[str, float] = {} + height: Optional[int] = None + width: Optional[int] = None + height_on_atlas: Optional[int] = None + width_on_atlas: Optional[int] = None + + +class BatchPositionParameters(BaseModel): + tag: str + x_stage_position: float + y_stage_position: float + x_beamshift: float + y_beamshift: float + search_map_name: str + + class MultigridWatcherSetup(BaseModel): source: Path skip_existing_processing: bool = False diff --git a/src/murfey/util/route_manifest.yaml b/src/murfey/util/route_manifest.yaml index e8ca73ef7..03bfb2af7 100644 --- a/src/murfey/util/route_manifest.yaml +++ b/src/murfey/util/route_manifest.yaml @@ -818,6 +818,21 @@ murfey.server.api.session_control.spa_router: type: int methods: - POST +murfey.server.api.session_control.tomo_router: + - path: /session_control/tomo/sessions/{session_id}/search_map/{sm_name} + function: register_search_map + path_params: + - name: sm_name + type: str + methods: + - POST + - path: /session_control/tomo/sessions/{session_id}/batch_position/{batch_name} + function: register_batch_position + path_params: + - name: batch_name + type: str + methods: + - POST murfey.server.api.session_info.correlative_router: - path: /session_info/correlative/sessions/{session_id}/upstream_visits function: find_upstream_visits diff --git a/src/murfey/workflows/tomo/tomo_metadata.py b/src/murfey/workflows/tomo/tomo_metadata.py new file mode 100644 index 000000000..02c1c60c9 --- /dev/null +++ b/src/murfey/workflows/tomo/tomo_metadata.py @@ -0,0 +1,354 @@ +import logging + +import numpy as np +from sqlmodel import Session, select + +from murfey.server import _transport_object +from murfey.server.api.auth import MurfeySessionIDInstrument as MurfeySessionID +from murfey.server.gain import Camera +from murfey.util import sanitise +from murfey.util.config import get_machine_config +from murfey.util.db import DataCollectionGroup, SearchMap +from 
murfey.util.db import Session as MurfeySession +from murfey.util.db import TiltSeries +from murfey.util.models import BatchPositionParameters, SearchMapParameters + +logger = logging.getLogger("murfey.client.util.tomo_metadata") + + +def register_search_map_in_database( + session_id: MurfeySessionID, + search_map_name: str, + search_map_params: SearchMapParameters, + murfey_db: Session, + close_db: bool = True, +): + dcg = murfey_db.exec( + select(DataCollectionGroup) + .where(DataCollectionGroup.session_id == session_id) + .where(DataCollectionGroup.tag == search_map_params.tag) + ).one() + try: + # See if there is already a search map with this name and update if so + search_map = murfey_db.exec( + select(SearchMap) + .where(SearchMap.name == search_map_name) + .where(SearchMap.tag == search_map_params.tag) + .where(SearchMap.session_id == session_id) + ).one() + search_map.x_stage_position = ( + search_map_params.x_stage_position or search_map.x_stage_position + ) + search_map.y_stage_position = ( + search_map_params.y_stage_position or search_map.y_stage_position + ) + search_map.pixel_size = search_map_params.pixel_size or search_map.pixel_size + search_map.image = search_map_params.image or search_map.image + search_map.binning = search_map_params.binning or search_map.binning + search_map.reference_matrix_m11 = ( + search_map_params.reference_matrix.get("m11") + or search_map.reference_matrix_m11 + ) + search_map.reference_matrix_m12 = ( + search_map_params.reference_matrix.get("m12") + or search_map.reference_matrix_m12 + ) + search_map.reference_matrix_m21 = ( + search_map_params.reference_matrix.get("m21") + or search_map.reference_matrix_m21 + ) + search_map.reference_matrix_m22 = ( + search_map_params.reference_matrix.get("m22") + or search_map.reference_matrix_m22 + ) + search_map.stage_correction_m11 = ( + search_map_params.stage_correction.get("m11") + or search_map.stage_correction_m11 + ) + search_map.stage_correction_m12 = ( + 
search_map_params.stage_correction.get("m12") + or search_map.stage_correction_m12 + ) + search_map.stage_correction_m21 = ( + search_map_params.stage_correction.get("m21") + or search_map.stage_correction_m21 + ) + search_map.stage_correction_m22 = ( + search_map_params.stage_correction.get("m22") + or search_map.stage_correction_m22 + ) + search_map.image_shift_correction_m11 = ( + search_map_params.image_shift_correction.get("m11") + or search_map.image_shift_correction_m11 + ) + search_map.image_shift_correction_m12 = ( + search_map_params.image_shift_correction.get("m12") + or search_map.image_shift_correction_m12 + ) + search_map.image_shift_correction_m21 = ( + search_map_params.image_shift_correction.get("m21") + or search_map.image_shift_correction_m21 + ) + search_map.image_shift_correction_m22 = ( + search_map_params.image_shift_correction.get("m22") + or search_map.image_shift_correction_m22 + ) + search_map.height = search_map_params.height or search_map.height + search_map.width = search_map_params.width or search_map.width + if _transport_object: + _transport_object.do_update_search_map(search_map.id, search_map_params) + except Exception as e: + logger.info(f"Registering new search map due to {e}", exc_info=True) + if _transport_object: + sm_ispyb_response = _transport_object.do_insert_search_map( + dcg.atlas_id, search_map_params + ) + else: + # mock up response so that below still works + sm_ispyb_response = {"success": False, "return_value": None} + # Register new search map + search_map = SearchMap( + id=( + sm_ispyb_response["return_value"] + if sm_ispyb_response["success"] + else None + ), + name=search_map_name, + session_id=session_id, + tag=search_map_params.tag, + x_stage_position=search_map_params.x_stage_position, + y_stage_position=search_map_params.y_stage_position, + pixel_size=search_map_params.pixel_size, + image=search_map_params.image, + binning=search_map_params.binning, + 
reference_matrix_m11=search_map_params.reference_matrix.get("m11"), + reference_matrix_m12=search_map_params.reference_matrix.get("m12"), + reference_matrix_m21=search_map_params.reference_matrix.get("m21"), + reference_matrix_m22=search_map_params.reference_matrix.get("m22"), + stage_correction_m11=search_map_params.stage_correction.get("m11"), + stage_correction_m12=search_map_params.stage_correction.get("m12"), + stage_correction_m21=search_map_params.stage_correction.get("m21"), + stage_correction_m22=search_map_params.stage_correction.get("m22"), + image_shift_correction_m11=search_map_params.image_shift_correction.get( + "m11" + ), + image_shift_correction_m12=search_map_params.image_shift_correction.get( + "m12" + ), + image_shift_correction_m21=search_map_params.image_shift_correction.get( + "m21" + ), + image_shift_correction_m22=search_map_params.image_shift_correction.get( + "m22" + ), + height=search_map_params.height, + width=search_map_params.width, + ) + + murfey_session = murfey_db.exec( + select(MurfeySession).where(MurfeySession.id == session_id) + ).one() + machine_config = get_machine_config(instrument_name=murfey_session.instrument_name)[ + murfey_session.instrument_name + ] + if all( + [ + search_map.reference_matrix_m11, + search_map.stage_correction_m11, + search_map.x_stage_position, + search_map.y_stage_position, + search_map.pixel_size, + search_map.height, + search_map.width, + dcg.atlas_pixel_size, + ] + ): + # Work out the shifted positions if all required information is present + reference_shift_matrix = np.array( + [ + [ + search_map.reference_matrix_m11, + search_map.reference_matrix_m12, + ], + [ + search_map.reference_matrix_m21, + search_map.reference_matrix_m22, + ], + ] + ) + stage_vector = np.array( + [search_map.x_stage_position, search_map.y_stage_position] + ) + stage_correction_matrix = np.array( + [ + [ + search_map.stage_correction_m11, + search_map.stage_correction_m12, + ], + [ + search_map.stage_correction_m21, + 
def register_batch_position_in_database(
    session_id: MurfeySessionID,
    batch_name: str,
    batch_parameters: BatchPositionParameters,
    murfey_db: Session,
):
    """Record the on-search-map pixel location of a tilt series batch.

    Looks up the search map the batch was acquired on, converts the batch's
    stage position (relative to the search map's stage position) into pixel
    coordinates on that search map, and stores the result on the TiltSeries
    row. The TiltSeries row is created if it does not exist yet; if it
    already has a location the registration is skipped.
    """
    search_map = murfey_db.exec(
        select(SearchMap)
        .where(SearchMap.name == batch_parameters.search_map_name)
        .where(SearchMap.tag == batch_parameters.tag)
        .where(SearchMap.session_id == session_id)
    ).one()

    try:
        tilt_series = murfey_db.exec(
            select(TiltSeries)
            .where(TiltSeries.tag == batch_name)
            .where(TiltSeries.rsync_source == batch_parameters.tag)
            .where(TiltSeries.session_id == session_id)
        ).one()
        if tilt_series.x_location:
            logger.info(
                f"Already did position analysis for tomogram {sanitise(batch_name)}"
            )
            return
    except Exception:
        # No existing row: .one() raises when nothing matches, so create
        # the tilt series fresh. NOTE(review): a narrower exception type
        # (sqlalchemy NoResultFound) would avoid masking other errors —
        # kept broad to match the sibling registration functions.
        tilt_series = TiltSeries(
            tag=batch_name,
            rsync_source=batch_parameters.tag,
            session_id=session_id,
            search_map_id=search_map.id,
        )

    # The pixel location can only be derived once the calibration matrices,
    # stage position, pixel size and dimensions of the search map are known.
    # NOTE(review): truthiness means a legitimate 0.0 matrix element or
    # stage position is treated as missing — confirm this is intended.
    if all(
        [
            search_map.reference_matrix_m11,
            search_map.stage_correction_m11,
            search_map.x_stage_position,
            search_map.y_stage_position,
            search_map.pixel_size,
            search_map.height,
            search_map.width,
        ]
    ):
        reference_shift_matrix = _search_map_matrix(search_map, "reference_matrix")
        stage_correction_matrix = _search_map_matrix(search_map, "stage_correction")
        image_shift_matrix = _search_map_matrix(search_map, "image_shift_correction")

        # Stage offset of the batch relative to the search map centre
        stage_vector = np.array(
            [
                batch_parameters.x_stage_position - search_map.x_stage_position,
                batch_parameters.y_stage_position - search_map.y_stage_position,
            ]
        )

        # Undo the calibration transforms (inverse of image shift, stage and
        # reference corrections) to map the stage offset into the image frame
        corrected_vector = np.matmul(
            np.linalg.inv(reference_shift_matrix),
            np.matmul(
                np.linalg.inv(stage_correction_matrix),
                np.matmul(
                    np.linalg.inv(image_shift_matrix),
                    np.matmul(reference_shift_matrix, stage_vector),
                ),
            ),
        )
        # Convert to pixels and move the origin from the centre to the corner
        centre_batch_pixel = corrected_vector / search_map.pixel_size + [
            search_map.width / 2,
            search_map.height / 2,
        ]
        # Beam shift displaces the acquisition relative to the stage position
        tilt_series.x_location = (
            centre_batch_pixel[0]
            - batch_parameters.x_beamshift / search_map.pixel_size
        )
        tilt_series.y_location = (
            centre_batch_pixel[1]
            - batch_parameters.y_beamshift / search_map.pixel_size
        )
    else:
        logger.warning(
            f"Incomplete search map for position of {sanitise(batch_name)}: "
            f"stage {search_map.x_stage_position}, "
            f"width {search_map.width}"
        )
    murfey_db.add(tilt_series)
    murfey_db.commit()


def _search_map_matrix(search_map: SearchMap, prefix: str):
    """Assemble the named 2x2 calibration matrix stored as four scalar columns
    (``<prefix>_m11`` .. ``<prefix>_m22``) on the SearchMap row."""
    return np.array(
        [
            [
                getattr(search_map, f"{prefix}_m11"),
                getattr(search_map, f"{prefix}_m12"),
            ],
            [
                getattr(search_map, f"{prefix}_m21"),
                getattr(search_map, f"{prefix}_m22"),
            ],
        ]
    )
search_map.pixel_size + ) + tilt_series.y_location = ( + centre_batch_pixel[1] - batch_parameters.y_beamshift / search_map.pixel_size + ) + else: + logger.warning( + f"Incomplete search map for position of {sanitise(batch_name)}: " + f"stage {search_map.x_stage_position}, " + f"width {search_map.width}, " + ) + murfey_db.add(tilt_series) + murfey_db.commit() diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py new file mode 100644 index 000000000..9febae4d4 --- /dev/null +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -0,0 +1,365 @@ +from unittest import mock + +from sqlmodel import Session, select + +from murfey.util.db import DataCollectionGroup, SearchMap, TiltSeries +from murfey.util.models import BatchPositionParameters, SearchMapParameters +from murfey.workflows.tomo import tomo_metadata +from tests.conftest import ExampleVisit + + +@mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") +def test_register_search_map_update_with_dimensions( + mock_transport, murfey_db_session: Session +): + """Test the updating of an existing search map, without enough to find location""" + # Create a search map to update + search_map = SearchMap( + id=1, + name="SearchMap_1", + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + x_stage_position=0.1, + y_stage_position=0.2, + ) + murfey_db_session.add(search_map) + murfey_db_session.commit() + + # Make sure DCG is present with a pixel size + dcg = DataCollectionGroup( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Parameters to update with + new_parameters = SearchMapParameters( + tag="session_tag", + width=2000, + height=4000, + ) + + # Run the registration + tomo_metadata.register_search_map_in_database( + ExampleVisit.murfey_session_id, "SearchMap_1", new_parameters, murfey_db_session + ) + + # Check this would 
have updated ispyb + mock_transport.do_update_search_map.assert_called_with(1, new_parameters) + + # Confirm the database was updated + sm_final_parameters = murfey_db_session.exec(select(SearchMap)).one() + assert sm_final_parameters.width == new_parameters.width + assert sm_final_parameters.height == new_parameters.height + assert sm_final_parameters.x_stage_position == 0.1 + assert sm_final_parameters.y_stage_position == 0.2 + assert sm_final_parameters.x_location is None + + +@mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") +def test_register_search_map_update_with_all_parameters( + mock_transport, murfey_db_session: Session +): + """Test the updating of an existing search map with all required parameters""" + # Create a search map to update + search_map = SearchMap( + id=1, + name="SearchMap_1", + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + x_stage_position=0.1, + y_stage_position=0.2, + width=2000, + height=4000, + ) + murfey_db_session.add(search_map) + murfey_db_session.commit() + + # Make sure DCG is present with a pixel size + dcg = DataCollectionGroup( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Parameters to update with + new_parameters = SearchMapParameters( + tag="session_tag", + x_stage_position=0.3, + y_stage_position=0.4, + pixel_size=1e-7, + image="path/to/image", + binning=1, + reference_matrix={"m11": 1.01, "m12": 0.01, "m21": 0.02, "m22": 1.02}, + stage_correction={"m11": 0.99, "m12": -0.01, "m21": -0.02, "m22": 0.98}, + image_shift_correction={"m11": 1.03, "m12": 0.03, "m21": -0.03, "m22": 0.97}, + ) + + # Run the registration + tomo_metadata.register_search_map_in_database( + ExampleVisit.murfey_session_id, "SearchMap_1", new_parameters, murfey_db_session + ) + + # Confirm the database was updated + sm_final_parameters = 
murfey_db_session.exec(select(SearchMap)).one() + assert sm_final_parameters.width == 2000 + assert sm_final_parameters.height == 4000 + assert sm_final_parameters.x_stage_position == 0.3 + assert sm_final_parameters.y_stage_position == 0.4 + assert sm_final_parameters.pixel_size == 1e-7 + assert sm_final_parameters.image == "path/to/image" + assert sm_final_parameters.binning == 1 + assert sm_final_parameters.reference_matrix_m11 == 1.01 + assert sm_final_parameters.reference_matrix_m12 == 0.01 + assert sm_final_parameters.reference_matrix_m21 == 0.02 + assert sm_final_parameters.reference_matrix_m22 == 1.02 + assert sm_final_parameters.stage_correction_m11 == 0.99 + assert sm_final_parameters.stage_correction_m12 == -0.01 + assert sm_final_parameters.stage_correction_m21 == -0.02 + assert sm_final_parameters.stage_correction_m22 == 0.98 + assert sm_final_parameters.image_shift_correction_m11 == 1.03 + assert sm_final_parameters.image_shift_correction_m12 == 0.03 + assert sm_final_parameters.image_shift_correction_m21 == -0.03 + assert sm_final_parameters.image_shift_correction_m22 == 0.97 + + # These two should have been updated, but what that update should be is messy + assert sm_final_parameters.x_location is not None + assert sm_final_parameters.y_location is not None + + # Check this would have updated ispyb + mock_transport.do_update_search_map.assert_called_with(1, new_parameters) + new_parameters.x_location = sm_final_parameters.x_location + new_parameters.y_location = sm_final_parameters.y_location + new_parameters.height_on_atlas = 40 + new_parameters.width_on_atlas = 20 + mock_transport.do_update_search_map.assert_called_with(1, new_parameters) + + +@mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") +def test_register_search_map_insert_with_ispyb( + mock_transport, murfey_db_session: Session, tmp_path +): + """Insert a new search map""" + # Create a data collection group for lookups + dcg = DataCollectionGroup( + id=1, + 
session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Set the ispyb return + mock_transport.do_insert_search_map.return_value = { + "return_value": 1, + "success": True, + } + + # Parameters to update with + new_parameters = SearchMapParameters( + tag="session_tag", + x_stage_position=1.3, + y_stage_position=1.4, + pixel_size=1.02, + ) + + # Run the registration + tomo_metadata.register_search_map_in_database( + ExampleVisit.murfey_session_id, "SearchMap_1", new_parameters, murfey_db_session + ) + + # Check this would have updated ispyb + mock_transport.do_insert_search_map.assert_called_with(90, new_parameters) + + # Confirm the database entry was made + sm_final_parameters = murfey_db_session.exec(select(SearchMap)).one() + assert sm_final_parameters.id == 1 + assert sm_final_parameters.name == "SearchMap_1" + assert sm_final_parameters.session_id == ExampleVisit.murfey_session_id + assert sm_final_parameters.tag == "session_tag" + assert sm_final_parameters.x_stage_position == 1.3 + assert sm_final_parameters.y_stage_position == 1.4 + assert sm_final_parameters.pixel_size == 1.02 + assert sm_final_parameters.x_location is None + + +def test_register_batch_position_update(murfey_db_session: Session): + """Test the updating of an existing tilt series with batch positions""" + # Make sure search map is present + search_map = SearchMap( + id=1, + name="SearchMap_1", + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + x_stage_position=1, + y_stage_position=2, + pixel_size=0.01, + reference_matrix_m11=1, + reference_matrix_m12=0, + reference_matrix_m21=0, + reference_matrix_m22=1, + stage_correction_m11=1, + stage_correction_m12=0, + stage_correction_m21=0, + stage_correction_m22=1, + image_shift_correction_m11=1, + image_shift_correction_m12=0, + image_shift_correction_m21=0, + image_shift_correction_m22=1, + height=4000, + width=2000, 
+ ) + murfey_db_session.add(search_map) + murfey_db_session.commit() + + # Create a tilt series to update + tilt_series = TiltSeries( + tag="Position_1", + rsync_source="session_tag", + session_id=ExampleVisit.murfey_session_id, + search_map_id=1, + ) + murfey_db_session.add(tilt_series) + murfey_db_session.commit() + + # Parameters to update with + new_parameters = BatchPositionParameters( + tag="session_tag", + x_stage_position=0.1, + y_stage_position=0.2, + x_beamshift=0.3, + y_beamshift=0.4, + search_map_name="SearchMap_1", + ) + + # Run the registration + tomo_metadata.register_batch_position_in_database( + ExampleVisit.murfey_session_id, "Position_1", new_parameters, murfey_db_session + ) + + # These two should have been updated, values are known as used identity matrices + bp_final_parameters = murfey_db_session.exec(select(TiltSeries)).one() + assert bp_final_parameters.x_location == 880 + assert bp_final_parameters.y_location == 1780 + + +def test_register_batch_position_update_skip(murfey_db_session: Session): + """Test the updating of an existing batch position, skipped as already done""" + # Make sure search map is present + search_map = SearchMap( + id=1, + name="SearchMap_1", + session_id=ExampleVisit.murfey_session_id, + tag="session_tag", + x_stage_position=1, + y_stage_position=2, + pixel_size=0.01, + reference_matrix_m11=1, + reference_matrix_m12=0, + reference_matrix_m21=0, + reference_matrix_m22=1, + stage_correction_m11=1, + stage_correction_m12=0, + stage_correction_m21=0, + stage_correction_m22=1, + image_shift_correction_m11=1, + image_shift_correction_m12=0, + image_shift_correction_m21=0, + image_shift_correction_m22=1, + height=4000, + width=2000, + ) + murfey_db_session.add(search_map) + murfey_db_session.commit() + + # Create a tilt series to update + tilt_series = TiltSeries( + tag="Position_1", + rsync_source="session_tag", + session_id=ExampleVisit.murfey_session_id, + search_map_id=1, + x_location=100, + y_location=200, + ) + 
def test_register_batch_position_new(murfey_db_session: Session):
    """Register a brand new tilt series along with its batch position."""
    # Search map with identity calibration matrices so the expected pixel
    # positions can be worked out by hand; no tilt series exists yet
    murfey_db_session.add(
        SearchMap(
            id=1,
            name="SearchMap_1",
            session_id=ExampleVisit.murfey_session_id,
            tag="session_tag",
            x_stage_position=1,
            y_stage_position=2,
            pixel_size=0.01,
            reference_matrix_m11=1,
            reference_matrix_m12=0,
            reference_matrix_m21=0,
            reference_matrix_m22=1,
            stage_correction_m11=1,
            stage_correction_m12=0,
            stage_correction_m21=0,
            stage_correction_m22=1,
            image_shift_correction_m11=1,
            image_shift_correction_m12=0,
            image_shift_correction_m21=0,
            image_shift_correction_m22=1,
            height=4000,
            width=2000,
        )
    )
    murfey_db_session.commit()

    # Register a position for a tilt series that is not in the database
    batch_params = BatchPositionParameters(
        tag="session_tag",
        x_stage_position=0.1,
        y_stage_position=0.2,
        x_beamshift=0.3,
        y_beamshift=0.4,
        search_map_name="SearchMap_1",
    )
    tomo_metadata.register_batch_position_in_database(
        ExampleVisit.murfey_session_id, "Position_1", batch_params, murfey_db_session
    )

    # A row should have been created with the identity-matrix positions:
    # (0.1-1)/0.01 + 2000/2 - 0.3/0.01 = 880 and
    # (0.2-2)/0.01 + 4000/2 - 0.4/0.01 = 1780
    created = murfey_db_session.exec(select(TiltSeries)).one()
    assert created.x_location == 880
    assert created.y_location == 1780