From babfea4b0753b08cd0a456af09dd0541e102469c Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 20 Nov 2025 16:02:49 +0000 Subject: [PATCH 01/34] Register dcg for atlases --- src/murfey/client/contexts/atlas.py | 54 +++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/src/murfey/client/contexts/atlas.py b/src/murfey/client/contexts/atlas.py index 85460d8df..780b1f45f 100644 --- a/src/murfey/client/contexts/atlas.py +++ b/src/murfey/client/contexts/atlas.py @@ -2,6 +2,8 @@ from pathlib import Path from typing import Optional +import xmltodict + from murfey.client.context import Context from murfey.client.contexts.spa import _get_source from murfey.client.contexts.spa_metadata import _atlas_destination @@ -49,3 +51,55 @@ def post_transfer( logger.info( f"Submitted request to create JPG image of atlas {str(transferred_atlas_name)!r}" ) + elif ( + environment + and "Atlas_" in transferred_file.stem + and transferred_file.suffix == ".xml" + ): + source = _get_source(transferred_file, environment) + if source: + atlas_mrc = transferred_file.with_suffix(".mrc") + transferred_atlas_name = _atlas_destination( + environment, source, atlas_mrc, self._token + ) / atlas_mrc.relative_to(source.parent) + + with open(transferred_file, "rb") as atlas_xml: + atlas_xml_data = xmltodict.parse(atlas_xml) + atlas_original_pixel_size = float( + atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"][ + "x" + ]["numericValue"] + ) + + # need to calculate the pixel size of the downscaled image + atlas_pixel_size = atlas_original_pixel_size * 7.8 + + for p in transferred_file.parts: + if p.startswith("Sample"): + sample = int(p.replace("Sample", "")) + break + else: + logger.warning( + f"Sample could not be identified for {transferred_file}" + ) + return + + dcg_data = { + "experiment_type_id": 44, # Atlas + "tag": str(transferred_file.parent), + "atlas": str(transferred_atlas_name), + "sample": sample, + "atlas_pixel_size": atlas_pixel_size, + } + 
capture_post( + base_url=str(environment.url.geturl()), + router_name="workflow.router", + function_name="register_dc_group", + token=self._token, + visit_name=environment.visit, + session_id=environment.murfey_session, + data=dcg_data, + ) + logger.info( + f"Registered data collection group for atlas {str(transferred_atlas_name)!r}" + ) From a9249da03b3158486c1e6b34e9522123448e69a8 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 09:54:38 +0000 Subject: [PATCH 02/34] Always send atlas with tomo metadata dcg registration --- src/murfey/client/contexts/tomo_metadata.py | 120 ++++++++------------ 1 file changed, 49 insertions(+), 71 deletions(-) diff --git a/src/murfey/client/contexts/tomo_metadata.py b/src/murfey/client/contexts/tomo_metadata.py index 45d22a229..139627e46 100644 --- a/src/murfey/client/contexts/tomo_metadata.py +++ b/src/murfey/client/contexts/tomo_metadata.py @@ -20,10 +20,58 @@ def ensure_dcg_exists( source = _get_source(transferred_file, environment=environment) if not source: return None + source_visit_dir = source.parent + + session_file = source / "Session.dm" + if not session_file.is_file(): + logger.warning(f"Cannot find session file {str(session_file)}") + return + with open(session_file, "r") as session_xml: + session_data = xmltodict.parse(session_xml.read()) + + windows_path = session_data["TomographySession"]["AtlasId"] + logger.info(f"Windows path to atlas metadata found: {windows_path}") + if not windows_path: + logger.warning("No atlas metadata path found") + return + visit_index = windows_path.split("\\").index(environment.visit) + partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) + logger.info("Partial Linux path successfully constructed from Windows path") + + logger.info( + f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" + ) + atlas_xml_path = list((source_visit_dir / partial_path).parent.glob("Atlas_*.xml"))[ + 0 + ] + logger.info(f"Atlas XML 
path {str(atlas_xml_path)} found") + with open(atlas_xml_path, "rb") as atlas_xml: + atlas_xml_data = xmltodict.parse(atlas_xml) + atlas_pixel_size = float( + atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ + "numericValue" + ] + ) + + for p in partial_path.split("/"): + if p.startswith("Sample"): + sample = int(p.replace("Sample", "")) + break + else: + logger.warning(f"Sample could not be identified for {transferred_file}") + return + environment.samples[source] = SampleInfo(atlas=Path(partial_path), sample=sample) dcg_tag = str(source).replace(f"/{environment.visit}", "") dcg_data = { "experiment_type_id": 36, # Tomo "tag": dcg_tag, + "atlas": str( + _atlas_destination(environment, source, session_file, token) + / environment.samples[source].atlas.parent + / atlas_xml_path.with_suffix(".jpg").name + ), + "sample": environment.samples[source].sample, + "atlas_pixel_size": atlas_pixel_size, } capture_post( base_url=str(environment.url.geturl()), @@ -56,77 +104,7 @@ def post_transfer( if transferred_file.name == "Session.dm" and environment: logger.info("Tomography session metadata found") - with open(transferred_file, "r") as session_xml: - session_data = xmltodict.parse(session_xml.read()) - - windows_path = session_data["TomographySession"]["AtlasId"] - logger.info(f"Windows path to atlas metadata found: {windows_path}") - if not windows_path: - logger.warning("No atlas metadata path found") - return - visit_index = windows_path.split("\\").index(environment.visit) - partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) - logger.info("Partial Linux path successfully constructed from Windows path") - - source = _get_source(transferred_file, environment) - if not source: - logger.warning( - f"Source could not be identified for {str(transferred_file)}" - ) - return - - source_visit_dir = source.parent - - logger.info( - f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" - ) - 
atlas_xml_path = list( - (source_visit_dir / partial_path).parent.glob("Atlas_*.xml") - )[0] - logger.info(f"Atlas XML path {str(atlas_xml_path)} found") - with open(atlas_xml_path, "rb") as atlas_xml: - atlas_xml_data = xmltodict.parse(atlas_xml) - atlas_pixel_size = float( - atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ - "numericValue" - ] - ) - - for p in partial_path.split("/"): - if p.startswith("Sample"): - sample = int(p.replace("Sample", "")) - break - else: - logger.warning(f"Sample could not be identified for {transferred_file}") - return - environment.samples[source] = SampleInfo( - atlas=Path(partial_path), sample=sample - ) - dcg_tag = "/".join( - p for p in transferred_file.parent.parts if p != environment.visit - ).replace("//", "/") - dcg_data = { - "experiment_type_id": 36, # Tomo - "tag": dcg_tag, - "atlas": str( - _atlas_destination( - environment, source, transferred_file, self._token - ) - / environment.samples[source].atlas.parent - / atlas_xml_path.with_suffix(".jpg").name - ), - "sample": environment.samples[source].sample, - "atlas_pixel_size": atlas_pixel_size, - } - capture_post( - base_url=str(environment.url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", - token=self._token, - visit_name=environment.visit, - session_id=environment.murfey_session, - data=dcg_data, - ) + ensure_dcg_exists(transferred_file, environment, self._token) elif transferred_file.name == "SearchMap.xml" and environment: logger.info("Tomography session search map xml found") From 6e08588a68513213c4811a1d3ce9f5d83955e8e8 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 10:07:56 +0000 Subject: [PATCH 03/34] Always use tomo metadata function for tomo dcgs --- src/murfey/client/contexts/tomo.py | 18 ++----- src/murfey/client/contexts/tomo_metadata.py | 54 ++++++++++++--------- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/src/murfey/client/contexts/tomo.py
b/src/murfey/client/contexts/tomo.py index 6c4eff7ea..e63b38acf 100644 --- a/src/murfey/client/contexts/tomo.py +++ b/src/murfey/client/contexts/tomo.py @@ -9,6 +9,7 @@ import murfey.util.eer from murfey.client.context import Context, ProcessingParameter +from murfey.client.contexts.tomo_metadata import ensure_tomo_dcg_exists from murfey.client.instance_environment import ( MovieID, MovieTracker, @@ -101,21 +102,10 @@ def register_tomography_data_collections( ) return try: - dcg_data = { - "experiment_type_id": 36, # Tomo - "tag": str(self._basepath), - "atlas": "", - "sample": None, - } - capture_post( - base_url=str(environment.url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", - token=self._token, - visit_name=environment.visit, - session_id=environment.murfey_session, - data=dcg_data, + metadata_source = ( + self._basepath.parent / environment.visit / self._basepath.name ) + ensure_tomo_dcg_exists(metadata_source, environment, self._token) for tilt_series in self._tilt_series.keys(): if tilt_series not in self._tilt_series_with_pjids: diff --git a/src/murfey/client/contexts/tomo_metadata.py b/src/murfey/client/contexts/tomo_metadata.py index 139627e46..831acd6fc 100644 --- a/src/murfey/client/contexts/tomo_metadata.py +++ b/src/murfey/client/contexts/tomo_metadata.py @@ -13,16 +13,11 @@ logger = logging.getLogger("murfey.client.contexts.tomo_metadata") -def ensure_dcg_exists( - transferred_file: Path, environment: MurfeyInstanceEnvironment, token: str +def ensure_tomo_dcg_exists( + metadata_source: Path, environment: MurfeyInstanceEnvironment, token: str ): - # Make sure we have a data collection group - source = _get_source(transferred_file, environment=environment) - if not source: - return None - source_visit_dir = source.parent - - session_file = source / "Session.dm" + """Create a tomography data collection group""" + session_file = metadata_source / "Session.dm" if not session_file.is_file(): logger.warning(f"Cannot find 
session file {str(session_file)}") return @@ -38,6 +33,7 @@ def ensure_dcg_exists( partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) logger.info("Partial Linux path successfully constructed from Windows path") + source_visit_dir = metadata_source.parent logger.info( f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" ) @@ -58,19 +54,21 @@ def ensure_dcg_exists( sample = int(p.replace("Sample", "")) break else: - logger.warning(f"Sample could not be identified for {transferred_file}") + logger.warning(f"Sample could not be identified for {metadata_source}") return - environment.samples[source] = SampleInfo(atlas=Path(partial_path), sample=sample) - dcg_tag = str(source).replace(f"/{environment.visit}", "") + environment.samples[metadata_source] = SampleInfo( + atlas=Path(partial_path), sample=sample + ) + dcg_tag = str(metadata_source).replace(f"/{environment.visit}", "") dcg_data = { "experiment_type_id": 36, # Tomo "tag": dcg_tag, "atlas": str( - _atlas_destination(environment, source, session_file, token) - / environment.samples[source].atlas.parent + _atlas_destination(environment, metadata_source, session_file, token) + / environment.samples[metadata_source].atlas.parent / atlas_xml_path.with_suffix(".jpg").name ), - "sample": environment.samples[source].sample, + "sample": environment.samples[metadata_source].sample, "atlas_pixel_size": atlas_pixel_size, } capture_post( @@ -102,13 +100,23 @@ def post_transfer( **kwargs, ) - if transferred_file.name == "Session.dm" and environment: + if environment is None: + logger.warning("No environment set") + return + + metadata_source = _get_source(transferred_file, environment=environment) + if not metadata_source: + logger.warning(f"No source found for {str(transferred_file)}") + return + + if transferred_file.name == "Session.dm": logger.info("Tomography session metadata found") - ensure_dcg_exists(transferred_file, environment, self._token) + 
ensure_tomo_dcg_exists(metadata_source, environment, self._token) - elif transferred_file.name == "SearchMap.xml" and environment: + elif transferred_file.name == "SearchMap.xml": logger.info("Tomography session search map xml found") - dcg_tag = ensure_dcg_exists(transferred_file, environment, self._token) + + dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -208,9 +216,9 @@ def post_transfer( }, ) - elif transferred_file.name == "SearchMap.dm" and environment: + elif transferred_file.name == "SearchMap.dm": logger.info("Tomography session search map dm found") - dcg_tag = ensure_dcg_exists(transferred_file, environment, self._token) + dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -254,9 +262,9 @@ def post_transfer( }, ) - elif transferred_file.name == "BatchPositionsList.xml" and environment: + elif transferred_file.name == "BatchPositionsList.xml": logger.info("Tomography session batch positions list found") - dcg_tag = ensure_dcg_exists(transferred_file, environment, self._token) + dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) with open(transferred_file) as xml: for_parsing = xml.read() batch_xml = xmltodict.parse(for_parsing) From 663361c587500ce2182cf9997c80830cd49a37a2 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 11:01:24 +0000 Subject: [PATCH 04/34] Common function for dcg registration --- src/murfey/client/context.py | 144 +++++++++++++++++++- src/murfey/client/contexts/atlas.py | 5 +- src/murfey/client/contexts/spa_metadata.py | 141 ++----------------- src/murfey/client/contexts/tomo_metadata.py | 89 ++---------- src/murfey/workflows/tomo/tomo_metadata.py | 8 +- 5 files changed, 167 insertions(+), 220 deletions(-) diff --git a/src/murfey/client/context.py 
b/src/murfey/client/context.py index 15cc61f0e..860c0d95b 100644 --- a/src/murfey/client/context.py +++ b/src/murfey/client/context.py @@ -3,16 +3,150 @@ import logging from importlib.metadata import entry_points from pathlib import Path -from typing import Any, Dict, List, NamedTuple +from typing import Any, List, NamedTuple -from murfey.client.instance_environment import MurfeyInstanceEnvironment +import xmltodict + +from murfey.client.instance_environment import MurfeyInstanceEnvironment, SampleInfo +from murfey.util.client import capture_post, get_machine_config_client logger = logging.getLogger("murfey.client.context") -class FutureRequest(NamedTuple): - url: str - message: Dict[str, Any] +def _atlas_destination( + environment: MurfeyInstanceEnvironment, source: Path, token: str +) -> Path: + machine_config = get_machine_config_client( + str(environment.url.geturl()), + token, + instrument_name=environment.instrument_name, + demo=environment.demo, + ) + for i, destination_part in enumerate( + Path(environment.default_destinations[source]).parts + ): + if destination_part == environment.visit: + return Path(machine_config.get("rsync_basepath", "")) / "/".join( + Path(environment.default_destinations[source]).parent.parts[: i + 1] + ) + return ( + Path(machine_config.get("rsync_basepath", "")) + / Path(environment.default_destinations[source]).parent + / environment.visit + ) + + +def ensure_dcg_exists( + collection_type: str, + metadata_source: Path, + environment: MurfeyInstanceEnvironment, + token: str, +): + """Create a data collection group""" + if collection_type == "tomo": + experiment_type_id = 36 + session_file = metadata_source / "Session.dm" + elif collection_type == "spa": + experiment_type_id = 37 + session_file = metadata_source / "EpuSession.dm" + else: + logger.error(f"Unknown collection type {collection_type}") + return + + if not session_file.is_file(): + logger.warning(f"Cannot find session file {str(session_file)}") + dcg_tag = ( + 
str(metadata_source).replace(f"/{environment.visit}", "").replace("//", "/") + ) + dcg_data = { + "experiment_type_id": experiment_type_id, + "tag": dcg_tag, + } + else: + with open(session_file, "r") as session_xml: + session_data = xmltodict.parse(session_xml.read()) + + if collection_type == "tomo": + windows_path = session_data["TomographySession"]["AtlasId"] + else: + windows_path = session_data["EpuSessionXml"]["Samples"]["_items"][ + "SampleXml" + ][0]["AtlasId"]["#text"] + + logger.info(f"Windows path to atlas metadata found: {windows_path}") + if not windows_path: + logger.warning("No atlas metadata path found") + return + visit_index = windows_path.split("\\").index(environment.visit) + partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) + logger.info("Partial Linux path successfully constructed from Windows path") + + source_visit_dir = metadata_source.parent + logger.info( + f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" + ) + atlas_xml_path = list( + (source_visit_dir / partial_path).parent.glob("Atlas_*.xml") + )[0] + logger.info(f"Atlas XML path {str(atlas_xml_path)} found") + with open(atlas_xml_path, "rb") as atlas_xml: + atlas_xml_data = xmltodict.parse(atlas_xml) + atlas_original_pixel_size = float( + atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ + "numericValue" + ] + ) + # need to calculate the pixel size of the downscaled image + atlas_pixel_size = atlas_original_pixel_size * 7.8 + logger.info(f"Atlas image pixel size determined to be {atlas_pixel_size}") + + for p in partial_path.split("/"): + if p.startswith("Sample"): + sample = int(p.replace("Sample", "")) + break + else: + logger.warning(f"Sample could not be identified for {metadata_source}") + return + environment.samples[metadata_source] = SampleInfo( + atlas=Path(partial_path), sample=sample + ) + + dcg_search_dir = ( + str(metadata_source).replace(f"/{environment.visit}", "").replace("//", 
"/") + ) + if collection_type == "tomo": + dcg_tag = dcg_search_dir + else: + dcg_images_dirs = sorted( + Path(dcg_search_dir).glob("Images-Disc*"), + key=lambda x: x.stat().st_ctime, + ) + if not dcg_images_dirs: + logger.warning(f"Cannot find Images-Disc* in {dcg_search_dir}") + return + dcg_tag = str(dcg_images_dirs[-1]) + + dcg_data = { + "experiment_type_id": experiment_type_id, + "tag": dcg_tag, + "atlas": str( + _atlas_destination(environment, metadata_source, token) + / environment.samples[metadata_source].atlas.parent + / atlas_xml_path.with_suffix(".jpg").name + ), + "sample": environment.samples[metadata_source].sample, + "atlas_pixel_size": atlas_pixel_size, + } + capture_post( + base_url=str(environment.url.geturl()), + router_name="workflow.router", + function_name="register_dc_group", + token=token, + visit_name=environment.visit, + session_id=environment.murfey_session, + data=dcg_data, + ) + return dcg_tag class ProcessingParameter(NamedTuple): diff --git a/src/murfey/client/contexts/atlas.py b/src/murfey/client/contexts/atlas.py index 85460d8df..72451d231 100644 --- a/src/murfey/client/contexts/atlas.py +++ b/src/murfey/client/contexts/atlas.py @@ -2,9 +2,8 @@ from pathlib import Path from typing import Optional -from murfey.client.context import Context +from murfey.client.context import Context, _atlas_destination from murfey.client.contexts.spa import _get_source -from murfey.client.contexts.spa_metadata import _atlas_destination from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.util.client import capture_post @@ -36,7 +35,7 @@ def post_transfer( source = _get_source(transferred_file, environment) if source: transferred_atlas_name = _atlas_destination( - environment, source, transferred_file, self._token + environment, source, self._token ) / transferred_file.relative_to(source.parent) capture_post( base_url=str(environment.url.geturl()), diff --git a/src/murfey/client/contexts/spa_metadata.py 
b/src/murfey/client/contexts/spa_metadata.py index f8c2e54fe..4cce22ddd 100644 --- a/src/murfey/client/contexts/spa_metadata.py +++ b/src/murfey/client/contexts/spa_metadata.py @@ -4,10 +4,10 @@ import xmltodict -from murfey.client.context import Context +from murfey.client.context import Context, ensure_dcg_exists from murfey.client.contexts.spa import _file_transferred_to, _get_source -from murfey.client.instance_environment import MurfeyInstanceEnvironment, SampleInfo -from murfey.util.client import capture_post, get_machine_config_client +from murfey.client.instance_environment import MurfeyInstanceEnvironment +from murfey.util.client import capture_post from murfey.util.spa_metadata import ( FoilHoleInfo, get_grid_square_atlas_positions, @@ -69,29 +69,6 @@ def _foil_hole_positions(xml_path: Path, grid_square: int) -> Dict[str, FoilHole return foil_holes -def _atlas_destination( - environment: MurfeyInstanceEnvironment, source: Path, file_path: Path, token: str -) -> Path: - machine_config = get_machine_config_client( - str(environment.url.geturl()), - token, - instrument_name=environment.instrument_name, - demo=environment.demo, - ) - for i, destination_part in enumerate( - Path(environment.default_destinations[source]).parts - ): - if destination_part == environment.visit: - return Path(machine_config.get("rsync_basepath", "")) / "/".join( - Path(environment.default_destinations[source]).parent.parts[: i + 1] - ) - return ( - Path(machine_config.get("rsync_basepath", "")) - / Path(environment.default_destinations[source]).parent - / environment.visit - ) - - class SPAMetadataContext(Context): def __init__(self, acquisition_software: str, basepath: Path, token: str): super().__init__("SPA_metadata", acquisition_software, token) @@ -124,82 +101,14 @@ def post_transfer( source = _get_source(transferred_file, environment) if not source: logger.warning( - f"Source could not be indentified for {str(transferred_file)}" + f"Source could not be identified for 
{str(transferred_file)}" ) return - source_visit_dir = source.parent - - logger.info( - f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" - ) - atlas_xml_path = list( - (source_visit_dir / partial_path).parent.glob("Atlas_*.xml") - )[0] - logger.info(f"Atlas XML path {str(atlas_xml_path)} found") - with open(atlas_xml_path, "rb") as atlas_xml: - atlas_xml_data = xmltodict.parse(atlas_xml) - atlas_original_pixel_size = float( - atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ - "numericValue" - ] - ) - - # need to calculate the pixel size of the downscaled image - atlas_pixel_size = atlas_original_pixel_size * 7.8 - logger.info(f"Atlas image pixel size determined to be {atlas_pixel_size}") - - for p in partial_path.split("/"): - if p.startswith("Sample"): - sample = int(p.replace("Sample", "")) - break - else: - logger.warning(f"Sample could not be identified for {transferred_file}") - return if source: - environment.samples[source] = SampleInfo( - atlas=Path(partial_path), sample=sample - ) - dcg_search_dir = "/".join( - p for p in transferred_file.parent.parts if p != environment.visit - ) - dcg_search_dir = ( - dcg_search_dir[1:] - if dcg_search_dir.startswith("//") - else dcg_search_dir - ) - dcg_images_dirs = sorted( - Path(dcg_search_dir).glob("Images-Disc*"), - key=lambda x: x.stat().st_ctime, - ) - if not dcg_images_dirs: - logger.warning(f"Cannot find Images-Disc* in {dcg_search_dir}") - return - dcg_tag = str(dcg_images_dirs[-1]) - dcg_data = { - "experiment_type_id": 37, # Single particle - "tag": dcg_tag, - "atlas": str( - _atlas_destination( - environment, source, transferred_file, self._token - ) - / environment.samples[source].atlas.parent - / atlas_xml_path.with_suffix(".jpg").name - ), - "sample": environment.samples[source].sample, - "atlas_pixel_size": atlas_pixel_size, - } - capture_post( - base_url=str(environment.url.geturl()), - router_name="workflow.router", - 
function_name="register_dc_group", - token=self._token, - visit_name=environment.visit, - session_id=environment.murfey_session, - data=dcg_data, - ) + dcg_tag = ensure_dcg_exists("spa", source, environment, self._token) gs_pix_positions = get_grid_square_atlas_positions( - source_visit_dir / partial_path + source.parent / partial_path ) for gs, pos_data in gs_pix_positions.items(): if pos_data: @@ -228,46 +137,16 @@ def post_transfer( and environment ): # Make sure we have a data collection group before trying to register grid square - dcg_search_dir = "/".join( - p - for p in transferred_file.parent.parent.parts - if p != environment.visit - ) - dcg_search_dir = ( - dcg_search_dir[1:] - if dcg_search_dir.startswith("//") - else dcg_search_dir - ) - dcg_images_dirs = sorted( - Path(dcg_search_dir).glob("Images-Disc*"), - key=lambda x: x.stat().st_ctime, - ) - if not dcg_images_dirs: - logger.warning(f"Cannot find Images-Disc* in {dcg_search_dir}") - return - dcg_tag = str(dcg_images_dirs[-1]) - dcg_data = { - "experiment_type_id": 37, # Single particle - "tag": dcg_tag, - } - capture_post( - base_url=str(environment.url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", - token=self._token, - visit_name=environment.visit, - session_id=environment.murfey_session, - data=dcg_data, - ) + source = _get_source(transferred_file, environment=environment) + if source is None: + return None + ensure_dcg_exists("spa", source, environment, self._token) gs_name = int(transferred_file.stem.split("_")[1]) logger.info( f"Collecting foil hole positions for {str(transferred_file)} and grid square {gs_name}" ) fh_positions = _foil_hole_positions(transferred_file, gs_name) - source = _get_source(transferred_file, environment=environment) - if source is None: - return None visitless_source_search_dir = str(source).replace( f"/{environment.visit}", "" ) diff --git a/src/murfey/client/contexts/tomo_metadata.py b/src/murfey/client/contexts/tomo_metadata.py 
index 831acd6fc..76c6d60b5 100644 --- a/src/murfey/client/contexts/tomo_metadata.py +++ b/src/murfey/client/contexts/tomo_metadata.py @@ -4,85 +4,14 @@ import xmltodict -from murfey.client.context import Context +from murfey.client.context import Context, ensure_dcg_exists from murfey.client.contexts.spa import _file_transferred_to, _get_source -from murfey.client.contexts.spa_metadata import _atlas_destination -from murfey.client.instance_environment import MurfeyInstanceEnvironment, SampleInfo +from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.util.client import capture_post logger = logging.getLogger("murfey.client.contexts.tomo_metadata") -def ensure_tomo_dcg_exists( - metadata_source: Path, environment: MurfeyInstanceEnvironment, token: str -): - """Create a tomography data collection group""" - session_file = metadata_source / "Session.dm" - if not session_file.is_file(): - logger.warning(f"Cannot find session file {str(session_file)}") - return - with open(session_file, "r") as session_xml: - session_data = xmltodict.parse(session_xml.read()) - - windows_path = session_data["TomographySession"]["AtlasId"] - logger.info(f"Windows path to atlas metadata found: {windows_path}") - if not windows_path: - logger.warning("No atlas metadata path found") - return - visit_index = windows_path.split("\\").index(environment.visit) - partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) - logger.info("Partial Linux path successfully constructed from Windows path") - - source_visit_dir = metadata_source.parent - logger.info( - f"Looking for atlas XML file in metadata directory {str((source_visit_dir / partial_path).parent)}" - ) - atlas_xml_path = list((source_visit_dir / partial_path).parent.glob("Atlas_*.xml"))[ - 0 - ] - logger.info(f"Atlas XML path {str(atlas_xml_path)} found") - with open(atlas_xml_path, "rb") as atlas_xml: - atlas_xml_data = xmltodict.parse(atlas_xml) - atlas_pixel_size = float( - 
atlas_xml_data["MicroscopeImage"]["SpatialScale"]["pixelSize"]["x"][ - "numericValue" - ] - ) - - for p in partial_path.split("/"): - if p.startswith("Sample"): - sample = int(p.replace("Sample", "")) - break - else: - logger.warning(f"Sample could not be identified for {metadata_source}") - return - environment.samples[metadata_source] = SampleInfo( - atlas=Path(partial_path), sample=sample - ) - dcg_tag = str(metadata_source).replace(f"/{environment.visit}", "") - dcg_data = { - "experiment_type_id": 36, # Tomo - "tag": dcg_tag, - "atlas": str( - _atlas_destination(environment, metadata_source, session_file, token) - / environment.samples[metadata_source].atlas.parent - / atlas_xml_path.with_suffix(".jpg").name - ), - "sample": environment.samples[metadata_source].sample, - "atlas_pixel_size": atlas_pixel_size, - } - capture_post( - base_url=str(environment.url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", - token=token, - visit_name=environment.visit, - session_id=environment.murfey_session, - data=dcg_data, - ) - return dcg_tag - - class TomographyMetadataContext(Context): def __init__(self, acquisition_software: str, basepath: Path, token: str): super().__init__("Tomography_metadata", acquisition_software, token) @@ -111,12 +40,14 @@ def post_transfer( if transferred_file.name == "Session.dm": logger.info("Tomography session metadata found") - ensure_tomo_dcg_exists(metadata_source, environment, self._token) + ensure_dcg_exists("tomo", metadata_source, environment, self._token) elif transferred_file.name == "SearchMap.xml": logger.info("Tomography session search map xml found") - dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) + dcg_tag = ensure_dcg_exists( + "tomo", metadata_source, environment, self._token + ) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -218,7 +149,9 @@ def post_transfer( elif transferred_file.name == "SearchMap.dm": 
logger.info("Tomography session search map dm found") - dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) + dcg_tag = ensure_dcg_exists( + "tomo", metadata_source, environment, self._token + ) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -264,7 +197,9 @@ def post_transfer( elif transferred_file.name == "BatchPositionsList.xml": logger.info("Tomography session batch positions list found") - dcg_tag = ensure_tomo_dcg_exists(metadata_source, environment, self._token) + dcg_tag = ensure_dcg_exists( + "tomo", metadata_source, environment, self._token + ) with open(transferred_file) as xml: for_parsing = xml.read() batch_xml = xmltodict.parse(for_parsing) diff --git a/src/murfey/workflows/tomo/tomo_metadata.py b/src/murfey/workflows/tomo/tomo_metadata.py index 2dbc25cb7..23b6490fb 100644 --- a/src/murfey/workflows/tomo/tomo_metadata.py +++ b/src/murfey/workflows/tomo/tomo_metadata.py @@ -215,16 +215,16 @@ def register_search_map_in_database( # Convert from metres to pixels search_map_params.height_on_atlas = int( - search_map.height * search_map.pixel_size / dcg.atlas_pixel_size + search_map.height * search_map.pixel_size / dcg.atlas_pixel_size * 7.8 ) search_map_params.width_on_atlas = int( - search_map.width * search_map.pixel_size / dcg.atlas_pixel_size + search_map.width * search_map.pixel_size / dcg.atlas_pixel_size * 7.8 ) search_map_params.x_location = float( - corrected_vector[0] / dcg.atlas_pixel_size + 2003 + corrected_vector[0] / dcg.atlas_pixel_size * 7.8 + 2003 ) search_map_params.y_location = float( - corrected_vector[1] / dcg.atlas_pixel_size + 2003 + corrected_vector[1] / dcg.atlas_pixel_size * 7.8 + 2003 ) search_map.x_location = search_map_params.x_location search_map.y_location = search_map_params.y_location From 6fe40c223fef9367cdd1fcff60315880aed9f588 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 11:32:32 +0000 Subject: [PATCH 05/34] Always use the dcg function in 
the client --- src/murfey/client/contexts/spa_metadata.py | 14 ++++++-- src/murfey/client/contexts/tomo.py | 10 ++++-- src/murfey/client/contexts/tomo_metadata.py | 22 +++++++++--- src/murfey/client/multigrid_control.py | 35 ++++++++----------- src/murfey/client/tui/app.py | 37 +++++++++------------ 5 files changed, 66 insertions(+), 52 deletions(-) diff --git a/src/murfey/client/contexts/spa_metadata.py b/src/murfey/client/contexts/spa_metadata.py index 4cce22ddd..52785ad96 100644 --- a/src/murfey/client/contexts/spa_metadata.py +++ b/src/murfey/client/contexts/spa_metadata.py @@ -106,7 +106,12 @@ def post_transfer( return if source: - dcg_tag = ensure_dcg_exists("spa", source, environment, self._token) + dcg_tag = ensure_dcg_exists( + collection_type="spa", + metadata_source=source, + environment=environment, + token=self._token, + ) gs_pix_positions = get_grid_square_atlas_positions( source.parent / partial_path ) @@ -140,7 +145,12 @@ def post_transfer( source = _get_source(transferred_file, environment=environment) if source is None: return None - ensure_dcg_exists("spa", source, environment, self._token) + ensure_dcg_exists( + collection_type="spa", + metadata_source=source, + environment=environment, + token=self._token, + ) gs_name = int(transferred_file.stem.split("_")[1]) logger.info( diff --git a/src/murfey/client/contexts/tomo.py b/src/murfey/client/contexts/tomo.py index e63b38acf..5df9bb1e9 100644 --- a/src/murfey/client/contexts/tomo.py +++ b/src/murfey/client/contexts/tomo.py @@ -8,8 +8,7 @@ import xmltodict import murfey.util.eer -from murfey.client.context import Context, ProcessingParameter -from murfey.client.contexts.tomo_metadata import ensure_tomo_dcg_exists +from murfey.client.context import Context, ProcessingParameter, ensure_dcg_exists from murfey.client.instance_environment import ( MovieID, MovieTracker, @@ -105,7 +104,12 @@ def register_tomography_data_collections( metadata_source = ( self._basepath.parent / environment.visit / 
self._basepath.name ) - ensure_tomo_dcg_exists(metadata_source, environment, self._token) + ensure_dcg_exists( + collection_type="tomo", + metadata_source=metadata_source, + environment=environment, + token=self._token, + ) for tilt_series in self._tilt_series.keys(): if tilt_series not in self._tilt_series_with_pjids: diff --git a/src/murfey/client/contexts/tomo_metadata.py b/src/murfey/client/contexts/tomo_metadata.py index 76c6d60b5..7d85a6a5a 100644 --- a/src/murfey/client/contexts/tomo_metadata.py +++ b/src/murfey/client/contexts/tomo_metadata.py @@ -40,13 +40,21 @@ def post_transfer( if transferred_file.name == "Session.dm": logger.info("Tomography session metadata found") - ensure_dcg_exists("tomo", metadata_source, environment, self._token) + ensure_dcg_exists( + collection_type="tomo", + metadata_source=metadata_source, + environment=environment, + token=self._token, + ) elif transferred_file.name == "SearchMap.xml": logger.info("Tomography session search map xml found") dcg_tag = ensure_dcg_exists( - "tomo", metadata_source, environment, self._token + collection_type="tomo", + metadata_source=metadata_source, + environment=environment, + token=self._token, ) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -150,7 +158,10 @@ def post_transfer( elif transferred_file.name == "SearchMap.dm": logger.info("Tomography session search map dm found") dcg_tag = ensure_dcg_exists( - "tomo", metadata_source, environment, self._token + collection_type="tomo", + metadata_source=metadata_source, + environment=environment, + token=self._token, ) with open(transferred_file, "r") as sm_xml: sm_data = xmltodict.parse(sm_xml.read()) @@ -198,7 +209,10 @@ def post_transfer( elif transferred_file.name == "BatchPositionsList.xml": logger.info("Tomography session batch positions list found") dcg_tag = ensure_dcg_exists( - "tomo", metadata_source, environment, self._token + collection_type="tomo", + metadata_source=metadata_source, + 
environment=environment, + token=self._token, ) with open(transferred_file) as xml: for_parsing = xml.read() diff --git a/src/murfey/client/multigrid_control.py b/src/murfey/client/multigrid_control.py index 78b616144..b6e9e6c3d 100644 --- a/src/murfey/client/multigrid_control.py +++ b/src/murfey/client/multigrid_control.py @@ -12,6 +12,7 @@ import murfey.client.websocket from murfey.client.analyser import Analyser +from murfey.client.context import ensure_dcg_exists from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.destinations import determine_default_destination @@ -610,28 +611,20 @@ def _start_dc(self, metadata_json, from_form: bool = False): log.info("Tomography processing flushed") elif isinstance(context, SPAModularContext): - dcg_data = { - "experiment_type_id": 37, # Single particle - "tag": str(source), - "atlas": ( - str(self._environment.samples[source].atlas) - if self._environment.samples.get(source) - else "" - ), - "sample": ( - self._environment.samples[source].sample - if self._environment.samples.get(source) - else None - ), - } - capture_post( - base_url=str(self._environment.url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", + if self._environment.visit in source.parts: + metadata_source = source + else: + metadata_source_as_str = ( + "/".join(source.parts[:-2]) + + f"/{self._environment.visit}/" + + source.parts[-2] + ) + metadata_source = Path(metadata_source_as_str.replace("//", "/")) + ensure_dcg_exists( + collection_type="spa", + metadata_source=metadata_source, + environment=self._environment, token=self.token, - visit_name=self._environment.visit, - session_id=self.session_id, - data=dcg_data, ) if from_form: data = { diff --git a/src/murfey/client/tui/app.py b/src/murfey/client/tui/app.py index 334a45187..3ab2cb616 100644 --- a/src/murfey/client/tui/app.py +++ b/src/murfey/client/tui/app.py @@ -14,6 +14,7 @@ from 
textual.widgets import Button, Input from murfey.client.analyser import Analyser +from murfey.client.context import ensure_dcg_exists from murfey.client.contexts.spa import SPAModularContext from murfey.client.contexts.tomo import TomographyContext from murfey.client.destinations import determine_default_destination @@ -561,28 +562,20 @@ def _start_dc(self, metadata_json, from_form: bool = False): ) log.info("Tomography processing flushed") elif isinstance(context, SPAModularContext): - dcg_data = { - "experiment_type_id": 37, # Single particle - "tag": str(source), - "atlas": ( - str(self._environment.samples[source].atlas) - if self._environment.samples.get(source) - else "" - ), - "sample": ( - self._environment.samples[source].sample - if self._environment.samples.get(source) - else None - ), - } - capture_post( - base_url=str(self._url.geturl()), - router_name="workflow.router", - function_name="register_dc_group", - token=token, - visit_name=self._visit, - session_id=self._environment.murfey_session, - data=dcg_data, + if self._environment.visit in source.parts: + metadata_source = source + else: + metadata_source_as_str = ( + "/".join(source.parts[:-2]) + + f"/{self._environment.visit}/" + + source.parts[-2] + ) + metadata_source = Path(metadata_source_as_str.replace("//", "/")) + ensure_dcg_exists( + collection_type="spa", + metadata_source=metadata_source, + environment=self._environment, + token=self.token, ) if from_form: data = { From 6bfade70ec4dcf8d6dac16365cd5aa9bde1e6754 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 12:29:14 +0000 Subject: [PATCH 06/34] Workflow to change experiment type id --- pyproject.toml | 1 + src/murfey/server/api/workflow.py | 18 +++++++++ src/murfey/server/ispyb.py | 28 +++++++++++++ .../register_experiment_type_update.py | 39 +++++++++++++++++++ 4 files changed, 86 insertions(+) create mode 100644 src/murfey/workflows/register_experiment_type_update.py diff --git a/pyproject.toml b/pyproject.toml index 
31c733ec3..852403e9e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,6 +109,7 @@ GitHub = "https://github.com/DiamondLightSource/python-murfey" "clem.register_preprocessing_result" = "murfey.workflows.clem.register_preprocessing_results:run" "data_collection" = "murfey.workflows.register_data_collection:run" "data_collection_group" = "murfey.workflows.register_data_collection_group:run" +"experiment_type_update" = "murfey.workflows.register_experiment_type_update:run" "pato" = "murfey.workflows.notifications:notification_setup" "picked_particles" = "murfey.workflows.spa.picking:particles_picked" "picked_tomogram" = "murfey.workflows.tomo.picking:picked_tomogram" diff --git a/src/murfey/server/api/workflow.py b/src/murfey/server/api/workflow.py index fcc793de2..0d2b38086 100644 --- a/src/murfey/server/api/workflow.py +++ b/src/murfey/server/api/workflow.py @@ -155,6 +155,24 @@ def register_dc_group( session_id, sm.name, search_map_params, db, close_db=False ) db.close() + elif dcg_murfey := db.exec( + select(DataCollectionGroup) + .where(DataCollectionGroup.session_id == session_id) + .where(DataCollectionGroup.atlas == dcg_params.atlas) + ).all(): + # Case where we switch from atlas to processing + dcg_murfey[0].tag = dcg_params.tag or dcg_murfey[0].tag + if _transport_object: + _transport_object.send( + _transport_object.feedback_queue, + { + "register": "experiment_type_update", + "experiment_type_id": dcg_params.experiment_type_id, + "dcgid": dcg_murfey[0].id, + }, + ) + db.add(dcg_murfey[0]) + db.commit() else: dcg_parameters = { "start_time": str(datetime.now()), diff --git a/src/murfey/server/ispyb.py b/src/murfey/server/ispyb.py index 0085f81bc..47860c891 100644 --- a/src/murfey/server/ispyb.py +++ b/src/murfey/server/ispyb.py @@ -137,6 +137,34 @@ def do_insert_data_collection_group( ) return {"success": False, "return_value": None} + def do_update_data_collection_group( + self, + record: DataCollectionGroup, + message=None, + **kwargs, + ): + try: 
+ with ISPyBSession() as db: + dcg = ( + db.query(DataCollectionGroup) + .filter( + DataCollectionGroup.dataCollectionGroupId + == record.dataCollectionGroupId + ) + .one() + ) + dcg.experimentTypeId = record.experimentTypeId + db.add(dcg) + db.commit() + return {"success": True, "return_value": record.dataCollectionGroupId} + except ispyb.ISPyBException as e: + log.error( + "Updating Data Collection Group entry caused exception '%s'.", + e, + exc_info=True, + ) + return {"success": False, "return_value": None} + def do_insert_atlas(self, record: Atlas): try: with ISPyBSession() as db: diff --git a/src/murfey/workflows/register_experiment_type_update.py b/src/murfey/workflows/register_experiment_type_update.py new file mode 100644 index 000000000..733c58c39 --- /dev/null +++ b/src/murfey/workflows/register_experiment_type_update.py @@ -0,0 +1,39 @@ +import logging +import time + +import ispyb.sqlalchemy._auto_db_schema as ISPyBDB +from sqlmodel.orm.session import Session as SQLModelSession + +from murfey.server import _transport_object + +logger = logging.getLogger("murfey.workflows.register_data_collection_group") + + +def run( + message: dict, murfey_db: SQLModelSession, demo: bool = False +) -> dict[str, bool]: + # Fail immediately if no transport wrapper is found + if _transport_object is None: + logger.error("Unable to find transport manager") + return {"success": False, "requeue": False} + + logger.info(f"Updating the experiment type for data collection group: \n{message}") + + record = ISPyBDB.DataCollectionGroup( + dataCollectionGroupId=message["dcgid"], + experimentTypeId=message["experiment_type_id"], + ) + dcgid = _transport_object.do_insert_data_collection_group(record).get( + "return_value", None + ) + + if dcgid is None: + time.sleep(2) + logger.error( + "Failed to update the following data collection group: \n" + f"{message} \n" + "Requeuing message" + ) + return {"success": False, "requeue": True} + + return {"success": True} From 
2d13a05385cab44c61465b3e4910cdad976d0b37 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 14:21:21 +0000 Subject: [PATCH 07/34] Test for atlas context --- src/murfey/client/contexts/atlas.py | 4 +- tests/client/contexts/test_atlas.py | 92 +++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 2 deletions(-) create mode 100644 tests/client/contexts/test_atlas.py diff --git a/src/murfey/client/contexts/atlas.py b/src/murfey/client/contexts/atlas.py index 780b1f45f..c1087c285 100644 --- a/src/murfey/client/contexts/atlas.py +++ b/src/murfey/client/contexts/atlas.py @@ -46,7 +46,7 @@ def post_transfer( function_name="make_atlas_jpg", token=self._token, session_id=environment.murfey_session, - data={"path": str(transferred_atlas_name)}, + data={"path": str(transferred_atlas_name).replace("//", "/")}, ) logger.info( f"Submitted request to create JPG image of atlas {str(transferred_atlas_name)!r}" @@ -87,7 +87,7 @@ def post_transfer( dcg_data = { "experiment_type_id": 44, # Atlas "tag": str(transferred_file.parent), - "atlas": str(transferred_atlas_name), + "atlas": str(transferred_atlas_name).replace("//", "/"), "sample": sample, "atlas_pixel_size": atlas_pixel_size, } diff --git a/tests/client/contexts/test_atlas.py b/tests/client/contexts/test_atlas.py new file mode 100644 index 000000000..361c19976 --- /dev/null +++ b/tests/client/contexts/test_atlas.py @@ -0,0 +1,92 @@ +from unittest.mock import patch +from urllib.parse import urlparse + +from murfey.client.contexts.atlas import AtlasContext +from murfey.client.instance_environment import MurfeyInstanceEnvironment + + +def test_atlas_context_initialisation(tmp_path): + context = AtlasContext("tomo", tmp_path, "token") + assert context.name == "Atlas" + assert context._acquisition_software == "tomo" + assert context._basepath == tmp_path + assert context._token == "token" + + +@patch("murfey.client.contexts.atlas.capture_post") +def test_atlas_context_mrc(mock_capture_post, tmp_path): + env = 
MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path / "cm12345-6"], + default_destinations={ + tmp_path / "cm12345-6": f"{tmp_path}/destination/cm12345-6" + }, + instrument_name="", + visit="cm12345-6", + murfey_session=1, + ) + context = AtlasContext("tomo", tmp_path, "token") + + atlas_mrc = tmp_path / "cm12345-6/Supervisor_atlas/Sample2/Atlas/Atlas_1.mrc" + atlas_mrc.parent.mkdir(parents=True) + atlas_mrc.touch() + + context.post_transfer( + atlas_mrc, + environment=env, + ) + mock_capture_post.assert_called_once_with( + base_url="http://localhost:8000", + router_name="session_control.spa_router", + function_name="make_atlas_jpg", + token="token", + session_id=1, + data={"path": f"{tmp_path}/destination/{atlas_mrc.relative_to(tmp_path)}"}, + ) + + +@patch("murfey.client.contexts.atlas.capture_post") +def test_atlas_context_xml(mock_capture_post, tmp_path): + env = MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path / "cm12345-6"], + default_destinations={ + tmp_path / "cm12345-6": f"{tmp_path}/destination/cm12345-6" + }, + instrument_name="", + visit="cm12345-6", + murfey_session=1, + ) + context = AtlasContext("tomo", tmp_path, "token") + + atlas_pixel_size = 4.6 + atlas_xml = tmp_path / "cm12345-6/Supervisor_atlas/Sample2/Atlas/Atlas_1.xml" + atlas_xml.parent.mkdir(parents=True) + with open(atlas_xml, "w") as new_xml: + new_xml.write( + f"{atlas_pixel_size}" + "" + ) + + context.post_transfer( + atlas_xml, + environment=env, + ) + dcg_data = { + "experiment_type_id": 44, # Atlas + "tag": str(atlas_xml.parent), + "atlas": f"{tmp_path}/destination/{atlas_xml.relative_to(tmp_path).with_suffix('.mrc')}", + "sample": 2, + "atlas_pixel_size": atlas_pixel_size * 7.8, + } + mock_capture_post.assert_called_once_with( + base_url="http://localhost:8000", + router_name="workflow.router", + function_name="register_dc_group", + token="token", + visit_name="cm12345-6", + 
session_id=1, + data=dcg_data, + ) From 2b9f47830c088445850debf58e230c0c188c474e Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Fri, 21 Nov 2025 15:20:46 +0000 Subject: [PATCH 08/34] Add hook for epu session --- src/murfey/client/context.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/murfey/client/context.py b/src/murfey/client/context.py index 860c0d95b..ac110e751 100644 --- a/src/murfey/client/context.py +++ b/src/murfey/client/context.py @@ -49,6 +49,9 @@ def ensure_dcg_exists( elif collection_type == "spa": experiment_type_id = 37 session_file = metadata_source / "EpuSession.dm" + for h in entry_points(group="murfey.hooks"): + if h.name == "get_epu_session": + h.load()(session_file, environment=environment) else: logger.error(f"Unknown collection type {collection_type}") return From f258c97759c68b7204f0888a5371d23e213e281c Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 09:36:55 +0000 Subject: [PATCH 09/34] Catch failed hook, and add some tests --- src/murfey/client/context.py | 19 +- tests/client/contexts/test_tomo.py | 267 +++++++++++++++++++++++ tests/client/test_context.py | 332 ++++++++++------------------- 3 files changed, 389 insertions(+), 229 deletions(-) create mode 100644 tests/client/contexts/test_tomo.py diff --git a/src/murfey/client/context.py b/src/murfey/client/context.py index ac110e751..36d4c9bca 100644 --- a/src/murfey/client/context.py +++ b/src/murfey/client/context.py @@ -41,7 +41,7 @@ def ensure_dcg_exists( metadata_source: Path, environment: MurfeyInstanceEnvironment, token: str, -): +) -> str | None: """Create a data collection group""" if collection_type == "tomo": experiment_type_id = 36 @@ -50,11 +50,14 @@ def ensure_dcg_exists( experiment_type_id = 37 session_file = metadata_source / "EpuSession.dm" for h in entry_points(group="murfey.hooks"): - if h.name == "get_epu_session": - h.load()(session_file, environment=environment) + try: + if h.name == "get_epu_session": + h.load()(session_file, 
environment=environment) + except Exception as e: + logger.warning(f"Get EPU session hook failed: {e}") else: logger.error(f"Unknown collection type {collection_type}") - return + return None if not session_file.is_file(): logger.warning(f"Cannot find session file {str(session_file)}") @@ -79,7 +82,7 @@ def ensure_dcg_exists( logger.info(f"Windows path to atlas metadata found: {windows_path}") if not windows_path: logger.warning("No atlas metadata path found") - return + return None visit_index = windows_path.split("\\").index(environment.visit) partial_path = "/".join(windows_path.split("\\")[visit_index + 1 :]) logger.info("Partial Linux path successfully constructed from Windows path") @@ -109,7 +112,7 @@ def ensure_dcg_exists( break else: logger.warning(f"Sample could not be identified for {metadata_source}") - return + return None environment.samples[metadata_source] = SampleInfo( atlas=Path(partial_path), sample=sample ) @@ -126,7 +129,7 @@ def ensure_dcg_exists( ) if not dcg_images_dirs: logger.warning(f"Cannot find Images-Disc* in {dcg_search_dir}") - return + return None dcg_tag = str(dcg_images_dirs[-1]) dcg_data = { @@ -136,7 +139,7 @@ def ensure_dcg_exists( _atlas_destination(environment, metadata_source, token) / environment.samples[metadata_source].atlas.parent / atlas_xml_path.with_suffix(".jpg").name - ), + ).replace("//", "/"), "sample": environment.samples[metadata_source].sample, "atlas_pixel_size": atlas_pixel_size, } diff --git a/tests/client/contexts/test_tomo.py b/tests/client/contexts/test_tomo.py new file mode 100644 index 000000000..4c88c9380 --- /dev/null +++ b/tests/client/contexts/test_tomo.py @@ -0,0 +1,267 @@ +from __future__ import annotations + +from pathlib import Path +from unittest.mock import patch +from urllib.parse import urlparse + +from murfey.client.contexts.tomo import TomographyContext +from murfey.client.instance_environment import MurfeyInstanceEnvironment + + +def 
test_tomography_context_initialisation_for_tomo(tmp_path): + context = TomographyContext("tomo", tmp_path, "") + assert not context._completed_tilt_series + assert context._acquisition_software == "tomo" + + +@patch("requests.get") +@patch("requests.post") +def test_tomography_context_add_tomo_tilt(mock_post, mock_get, tmp_path): + mock_post().status_code = 200 + + env = MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path], + default_destinations={tmp_path: str(tmp_path)}, + instrument_name="", + visit="test", + murfey_session=1, + ) + context = TomographyContext("tomo", tmp_path, "") + (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._tilt_series == { + "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] + } + (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert not context._completed_tilt_series + + # Add Position_1.mdoc, which completes this position + with open(tmp_path / "Position_1.mdoc", "w") as mdoc: + mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") + context.post_transfer( + tmp_path / "Position_1.mdoc", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._completed_tilt_series == ["Position_1"] + + # Start Position_2, this is not complete + (tmp_path / "Position_2_002_[30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_2_002_[30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert len(context._tilt_series.values()) == 
2 + assert context._completed_tilt_series == ["Position_1"] + + +@patch("requests.get") +@patch("requests.post") +def test_tomography_context_add_tomo_tilt_out_of_order(mock_post, mock_get, tmp_path): + mock_post().status_code = 200 + + env = MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path], + default_destinations={tmp_path: str(tmp_path)}, + instrument_name="", + visit="test", + murfey_session=1, + ) + context = TomographyContext("tomo", tmp_path, "") + (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._tilt_series == { + "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] + } + (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert not context._completed_tilt_series + (tmp_path / "Position_2_002_[-30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_2_002_[-30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert len(context._tilt_series.values()) == 2 + assert not context._completed_tilt_series + (tmp_path / "Position_2_001_[30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_2_001_[30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert len(context._tilt_series.values()) == 2 + assert not context._completed_tilt_series + (tmp_path / "Position_3_001_[30.0]_date_time_fractions.tiff").touch() + (tmp_path / "Position_3_002_[-30.0]_date_time_fractions.tiff").touch() + 
context.post_transfer( + tmp_path / "Position_3_002_[-30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert len(context._tilt_series.values()) == 3 + assert not context._completed_tilt_series + + # Add Position_1.mdoc, which completes this position + with open(tmp_path / "Position_1.mdoc", "w") as mdoc: + mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") + context.post_transfer( + tmp_path / "Position_1.mdoc", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._completed_tilt_series == ["Position_1"] + + # Add Position_2.mdoc, which completes this position + with open(tmp_path / "Position_2.mdoc", "w") as mdoc: + mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") + context.post_transfer( + tmp_path / "Position_2.mdoc", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._completed_tilt_series == ["Position_1", "Position_2"] + + +@patch("requests.get") +@patch("requests.post") +def test_tomography_context_add_tomo_tilt_delayed_tilt(mock_post, mock_get, tmp_path): + mock_post().status_code = 200 + + env = MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path], + default_destinations={tmp_path: str(tmp_path)}, + instrument_name="", + visit="test", + murfey_session=1, + ) + context = TomographyContext("tomo", tmp_path, "") + (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._tilt_series == { + "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] + } + (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", + 
required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert not context._completed_tilt_series + + # Add Position_1.mdoc, with more tilts than have been seen so far + with open(tmp_path / "Position_1.mdoc", "w") as mdoc: + mdoc.write("[ZValue = 0]\n[ZValue = 1]\n[ZValue = 2]\n") + context.post_transfer( + tmp_path / "Position_1.mdoc", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert not context._completed_tilt_series + + # Now add the tilt which completes the series + (tmp_path / "Position_1_003_[60.0]_data_time_fractions.tiff").touch() + new_series = context.post_transfer( + tmp_path / "Position_1_003_[60.0]_data_time_fractions.tiff", + required_position_files=[], + required_strings=["fractions"], + environment=env, + ) + assert context._completed_tilt_series == ["Position_1"] + assert new_series == ["Position_1"] + + +def test_tomography_context_initialisation_for_serialem(tmp_path): + context = TomographyContext("serialem", tmp_path, "") + assert not context._completed_tilt_series + assert context._acquisition_software == "serialem" + + +@patch("requests.get") +@patch("requests.post") +def test_setting_tilt_series_size_and_completion_from_mdoc_parsing( + mock_post, mock_get, tmp_path +): + mock_post().status_code = 200 + + env = MurfeyInstanceEnvironment( + url=urlparse("http://localhost:8000"), + client_id=0, + sources=[tmp_path], + default_destinations={tmp_path: str(tmp_path)}, + instrument_name="", + visit="test", + murfey_session=1, + ) + context = TomographyContext("tomo", tmp_path, "") + assert len(context._tilt_series_sizes) == 0 + context.post_transfer( + Path(__file__).parent.parent / "util" / "test_1.mdoc", + environment=env, + required_strings=["fractions"], + ) + assert len(context._tilt_series_sizes) == 1 + assert context._tilt_series_sizes == {"test_1": 11} + (tmp_path / "test_1.mdoc").touch() + tilt = -50 + (tmp_path / 
f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff", + environment=env, + required_strings=["fractions"], + ) + assert context._tilt_series == { + "test_1": [tmp_path / f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff"] + } + for i, t in enumerate(range(-40, 60, 10)): + assert not context._completed_tilt_series + (tmp_path / f"test_1_{i:03}_[{t:.1f}]_data_time_fractions.tiff").touch() + context.post_transfer( + tmp_path / f"test_1_{i:03}_[{t:.1f}]_data_time_fractions.tiff", + environment=env, + required_strings=["fractions"], + ) + assert len(context._tilt_series["test_1"]) == 11 + assert context._completed_tilt_series == ["test_1"] diff --git a/tests/client/test_context.py b/tests/client/test_context.py index 4c88c9380..e61a33be6 100644 --- a/tests/client/test_context.py +++ b/tests/client/test_context.py @@ -1,267 +1,157 @@ -from __future__ import annotations - -from pathlib import Path from unittest.mock import patch from urllib.parse import urlparse -from murfey.client.contexts.tomo import TomographyContext +from murfey.client.context import ensure_dcg_exists from murfey.client.instance_environment import MurfeyInstanceEnvironment -def test_tomography_context_initialisation_for_tomo(tmp_path): - context = TomographyContext("tomo", tmp_path, "") - assert not context._completed_tilt_series - assert context._acquisition_software == "tomo" - - -@patch("requests.get") -@patch("requests.post") -def test_tomography_context_add_tomo_tilt(mock_post, mock_get, tmp_path): - mock_post().status_code = 200 - +@patch("murfey.client.context.capture_post") +def test_ensure_dcg_exists_tomo(mock_capture_post, tmp_path): env = MurfeyInstanceEnvironment( url=urlparse("http://localhost:8000"), client_id=0, - sources=[tmp_path], - default_destinations={tmp_path: str(tmp_path)}, + sources=[tmp_path / "cm12345-6/metadata_folder"], + default_destinations={ + tmp_path + / 
"cm12345-6/metadata_folder": f"{tmp_path}/destination/cm12345-6/raw" + }, instrument_name="", - visit="test", + visit="cm12345-6", murfey_session=1, ) - context = TomographyContext("tomo", tmp_path, "") - (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._tilt_series == { - "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] - } - (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert not context._completed_tilt_series - # Add Position_1.mdoc, which completes this position - with open(tmp_path / "Position_1.mdoc", "w") as mdoc: - mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") - context.post_transfer( - tmp_path / "Position_1.mdoc", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._completed_tilt_series == ["Position_1"] + metadata_source = tmp_path / "cm12345-6/metadata_folder" + metadata_source.mkdir(parents=True) + with open(metadata_source / "Session.dm", "w") as dm_file: + dm_file.write( + "" + r"X:\cm12345-6\atlas\atlas_metadata\Sample6\Atlas\Atlas.dm" + "" + ) + + atlas_xml = tmp_path / "cm12345-6/atlas/atlas_metadata/Sample6/Atlas/Atlas_4.xml" + atlas_xml.parent.mkdir(parents=True) + with open(atlas_xml, "w") as xml_file: + xml_file.write( + "4.7" + "" + ) - # Start Position_2, this is not complete - (tmp_path / "Position_2_002_[30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_2_002_[30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], + ensure_dcg_exists( + collection_type="tomo", + 
metadata_source=metadata_source, environment=env, + token="token", ) - assert len(context._tilt_series.values()) == 2 - assert context._completed_tilt_series == ["Position_1"] + dcg_data = { + "experiment_type_id": 36, + "tag": f"{tmp_path}/metadata_folder", + "atlas": f"{tmp_path}/destination/{atlas_xml.relative_to(tmp_path).with_suffix('.jpg')}", + "sample": 6, + "atlas_pixel_size": 4.7 * 7.8, + } + mock_capture_post.assert_called_once_with( + base_url="http://localhost:8000", + router_name="workflow.router", + function_name="register_dc_group", + token="token", + visit_name="cm12345-6", + session_id=1, + data=dcg_data, + ) -@patch("requests.get") -@patch("requests.post") -def test_tomography_context_add_tomo_tilt_out_of_order(mock_post, mock_get, tmp_path): - mock_post().status_code = 200 +@patch("murfey.client.context.capture_post") +def test_ensure_dcg_exists_spa(mock_capture_post, tmp_path): env = MurfeyInstanceEnvironment( url=urlparse("http://localhost:8000"), client_id=0, - sources=[tmp_path], - default_destinations={tmp_path: str(tmp_path)}, + sources=[tmp_path / "cm12345-6/metadata_folder"], + default_destinations={ + tmp_path + / "cm12345-6/metadata_folder": f"{tmp_path}/destination/cm12345-6/raw", + }, instrument_name="", - visit="test", + visit="cm12345-6", murfey_session=1, ) - context = TomographyContext("tomo", tmp_path, "") - (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._tilt_series == { - "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] - } - (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert not 
context._completed_tilt_series - (tmp_path / "Position_2_002_[-30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_2_002_[-30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert len(context._tilt_series.values()) == 2 - assert not context._completed_tilt_series - (tmp_path / "Position_2_001_[30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_2_001_[30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert len(context._tilt_series.values()) == 2 - assert not context._completed_tilt_series - (tmp_path / "Position_3_001_[30.0]_date_time_fractions.tiff").touch() - (tmp_path / "Position_3_002_[-30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_3_002_[-30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert len(context._tilt_series.values()) == 3 - assert not context._completed_tilt_series - - # Add Position_1.mdoc, which completes this position - with open(tmp_path / "Position_1.mdoc", "w") as mdoc: - mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") - context.post_transfer( - tmp_path / "Position_1.mdoc", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._completed_tilt_series == ["Position_1"] - - # Add Position_2.mdoc, which completes this position - with open(tmp_path / "Position_2.mdoc", "w") as mdoc: - mdoc.write("[ZValue = 0]\n[ZValue = 1]\n") - context.post_transfer( - tmp_path / "Position_2.mdoc", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._completed_tilt_series == ["Position_1", "Position_2"] + metadata_source = tmp_path / "cm12345-6/metadata_folder" + metadata_source.mkdir(parents=True) + with open(metadata_source / 
"EpuSession.dm", "w") as dm_file: + dm_file.write( + "<_items>" + r"X:\cm12345-6\atlas\atlas_metadata\Sample6\Atlas\Atlas.dm" + "" + ) -@patch("requests.get") -@patch("requests.post") -def test_tomography_context_add_tomo_tilt_delayed_tilt(mock_post, mock_get, tmp_path): - mock_post().status_code = 200 + # Make data location + (tmp_path / "metadata_folder/Images-Disc1").mkdir(parents=True) - env = MurfeyInstanceEnvironment( - url=urlparse("http://localhost:8000"), - client_id=0, - sources=[tmp_path], - default_destinations={tmp_path: str(tmp_path)}, - instrument_name="", - visit="test", - murfey_session=1, - ) - context = TomographyContext("tomo", tmp_path, "") - (tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert context._tilt_series == { - "Position_1": [tmp_path / "Position_1_001_[30.0]_date_time_fractions.tiff"] - } - (tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / "Position_1_002_[-30.0]_date_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, - ) - assert not context._completed_tilt_series + atlas_xml = tmp_path / "cm12345-6/atlas/atlas_metadata/Sample6/Atlas/Atlas_4.xml" + atlas_xml.parent.mkdir(parents=True) + with open(atlas_xml, "w") as xml_file: + xml_file.write( + "4.7" + "" + ) - # Add Position_1.mdoc, with more tilts than have been seen so far - with open(tmp_path / "Position_1.mdoc", "w") as mdoc: - mdoc.write("[ZValue = 0]\n[ZValue = 1]\n[ZValue = 2]\n") - context.post_transfer( - tmp_path / "Position_1.mdoc", - required_position_files=[], - required_strings=["fractions"], + ensure_dcg_exists( + collection_type="spa", + metadata_source=metadata_source, environment=env, + token="token", ) - assert not context._completed_tilt_series - # Now 
add the tilt which completes the series - (tmp_path / "Position_1_003_[60.0]_data_time_fractions.tiff").touch() - new_series = context.post_transfer( - tmp_path / "Position_1_003_[60.0]_data_time_fractions.tiff", - required_position_files=[], - required_strings=["fractions"], - environment=env, + dcg_data = { + "experiment_type_id": 37, + "tag": f"{tmp_path}/metadata_folder/Images-Disc1", + "atlas": f"{tmp_path}/destination/{atlas_xml.relative_to(tmp_path).with_suffix('.jpg')}", + "sample": 6, + "atlas_pixel_size": 4.7 * 7.8, + } + mock_capture_post.assert_called_once_with( + base_url="http://localhost:8000", + router_name="workflow.router", + function_name="register_dc_group", + token="token", + visit_name="cm12345-6", + session_id=1, + data=dcg_data, ) - assert context._completed_tilt_series == ["Position_1"] - assert new_series == ["Position_1"] - - -def test_tomography_context_initialisation_for_serialem(tmp_path): - context = TomographyContext("serialem", tmp_path, "") - assert not context._completed_tilt_series - assert context._acquisition_software == "serialem" -@patch("requests.get") -@patch("requests.post") -def test_setting_tilt_series_size_and_completion_from_mdoc_parsing( - mock_post, mock_get, tmp_path -): - mock_post().status_code = 200 - +@patch("murfey.client.context.capture_post") +def test_ensure_dcg_exists_spa_missing_xml(mock_capture_post, tmp_path): env = MurfeyInstanceEnvironment( url=urlparse("http://localhost:8000"), client_id=0, sources=[tmp_path], default_destinations={tmp_path: str(tmp_path)}, instrument_name="", - visit="test", + visit="cm12345-6", murfey_session=1, ) - context = TomographyContext("tomo", tmp_path, "") - assert len(context._tilt_series_sizes) == 0 - context.post_transfer( - Path(__file__).parent.parent / "util" / "test_1.mdoc", - environment=env, - required_strings=["fractions"], - ) - assert len(context._tilt_series_sizes) == 1 - assert context._tilt_series_sizes == {"test_1": 11} - (tmp_path / "test_1.mdoc").touch() - 
tilt = -50 - (tmp_path / f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff", + + metadata_source = tmp_path / "cm12345-6/metadata_folder" + ensure_dcg_exists( + collection_type="spa", + metadata_source=metadata_source, environment=env, - required_strings=["fractions"], + token="token", ) - assert context._tilt_series == { - "test_1": [tmp_path / f"test_1_001_[{tilt:.1f}]_data_time_fractions.tiff"] + + dcg_data = { + "experiment_type_id": 37, + "tag": f"{tmp_path}/metadata_folder", } - for i, t in enumerate(range(-40, 60, 10)): - assert not context._completed_tilt_series - (tmp_path / f"test_1_{i:03}_[{t:.1f}]_data_time_fractions.tiff").touch() - context.post_transfer( - tmp_path / f"test_1_{i:03}_[{t:.1f}]_data_time_fractions.tiff", - environment=env, - required_strings=["fractions"], - ) - assert len(context._tilt_series["test_1"]) == 11 - assert context._completed_tilt_series == ["test_1"] + mock_capture_post.assert_called_once_with( + base_url="http://localhost:8000", + router_name="workflow.router", + function_name="register_dc_group", + token="token", + visit_name="cm12345-6", + session_id=1, + data=dcg_data, + ) From a36648068a2fbed9dee94375f0ec47ac8cbd5584 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 09:45:21 +0000 Subject: [PATCH 10/34] Fix test which broke on moving file --- tests/client/contexts/test_tomo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/client/contexts/test_tomo.py b/tests/client/contexts/test_tomo.py index 4c88c9380..077136185 100644 --- a/tests/client/contexts/test_tomo.py +++ b/tests/client/contexts/test_tomo.py @@ -238,7 +238,7 @@ def test_setting_tilt_series_size_and_completion_from_mdoc_parsing( context = TomographyContext("tomo", tmp_path, "") assert len(context._tilt_series_sizes) == 0 context.post_transfer( - Path(__file__).parent.parent / "util" / "test_1.mdoc", + 
Path(__file__).parent.parent.parent / "util" / "test_1.mdoc", environment=env, required_strings=["fractions"], ) From 1356ebab0d128c328ce4bb24d3a482d56464dead Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 12:16:34 +0000 Subject: [PATCH 11/34] More tests --- .../register_experiment_type_update.py | 2 +- .../test_register_experiment_type_update.py | 46 +++++++++++++++++++ tests/workflows/tomo/test_tomo_metadata.py | 8 ++-- 3 files changed, 51 insertions(+), 5 deletions(-) create mode 100644 tests/workflows/test_register_experiment_type_update.py diff --git a/src/murfey/workflows/register_experiment_type_update.py b/src/murfey/workflows/register_experiment_type_update.py index 733c58c39..6d70d2e2e 100644 --- a/src/murfey/workflows/register_experiment_type_update.py +++ b/src/murfey/workflows/register_experiment_type_update.py @@ -23,7 +23,7 @@ def run( dataCollectionGroupId=message["dcgid"], experimentTypeId=message["experiment_type_id"], ) - dcgid = _transport_object.do_insert_data_collection_group(record).get( + dcgid = _transport_object.do_update_data_collection_group(record).get( "return_value", None ) diff --git a/tests/workflows/test_register_experiment_type_update.py b/tests/workflows/test_register_experiment_type_update.py new file mode 100644 index 000000000..3d159dd79 --- /dev/null +++ b/tests/workflows/test_register_experiment_type_update.py @@ -0,0 +1,46 @@ +from unittest.mock import MagicMock + +import pytest +from pytest_mock import MockerFixture + +from murfey.workflows.register_experiment_type_update import run + +register_experiment_type_update_matrix = (0, 1, None) + + +@pytest.mark.parametrize("insert_dcg", register_experiment_type_update_matrix) +def test_run( + mocker: MockerFixture, + insert_dcg: int | None, +): + # Mock the transport object functions + mock_transport_object = mocker.patch( + "murfey.workflows.register_experiment_type_update._transport_object" + ) + 
mock_transport_object.do_update_data_collection_group.return_value = { + "return_value": insert_dcg, + } + mock_ispyb = mocker.patch( + "murfey.workflows.register_experiment_type_update.ISPyBDB" + ) + mock_ispyb.DataCollectionGroup.return_value = "ispyb_dcg" + + # Mock the Murfey database + mock_murfey_db = MagicMock() + + # Run the function and check the results and calls + message = { + "dcgid": 1, + "experiment_type_id": 0, + } + result = run(message=message, murfey_db=mock_murfey_db) + mock_ispyb.DataCollectionGroup.assert_called_once_with( + dataCollectionGroupId=1, experimentTypeId=0 + ) + mock_transport_object.do_update_data_collection_group.assert_called_once_with( + "ispyb_dcg" + ) + if insert_dcg is not None: + assert result == {"success": True} + else: + assert result == {"success": False, "requeue": True} diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index c1c458224..3925a1c95 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -135,12 +135,12 @@ def test_register_search_map_update_with_all_parameters( assert sm_final_parameters.y_location is not None # Check this would have updated ispyb - mock_transport.do_update_search_map.assert_called_with(1, new_parameters) + mock_transport.do_update_search_map.assert_any_call(1, new_parameters) new_parameters.x_location = sm_final_parameters.x_location new_parameters.y_location = sm_final_parameters.y_location - new_parameters.height_on_atlas = 40 - new_parameters.width_on_atlas = 20 - mock_transport.do_update_search_map.assert_called_with(1, new_parameters) + new_parameters.height_on_atlas = int(40 * 7.8) + new_parameters.width_on_atlas = int(20 * 7.8) + mock_transport.do_update_search_map.assert_any_call(1, new_parameters) @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From 8f21e09019f125d2ca86b7c499023a315d2658db Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 
14:12:36 +0000 Subject: [PATCH 12/34] Try and test ispyb db update --- tests/server/test_ispyb.py | 37 ++++++++++++++++++++++++++++++++++--- 1 file changed, 34 insertions(+), 3 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index d60ada7bc..63550753d 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -1,10 +1,12 @@ -from ispyb.sqlalchemy import BLSession, Proposal +from unittest import mock + +from ispyb.sqlalchemy import BLSession, DataCollectionGroup, Proposal from pytest import mark from sqlalchemy import select from sqlalchemy.orm import Session -from murfey.server.ispyb import get_proposal_id, get_session_id -from tests.conftest import ExampleVisit +from murfey.server.ispyb import TransportManager, get_proposal_id, get_session_id +from tests.conftest import ExampleVisit, get_or_create_db_entry def test_get_session_id( @@ -67,3 +69,32 @@ def test_get_sub_samples_from_visit(): @mark.skip def test_get_all_ongoing_visits(): pass + + +@mock.patch("workflows.transport.pika_transport.PikaTransport") +def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): + get_or_create_db_entry( + session=ispyb_db_session, + table=DataCollectionGroup, + insert_kwargs={ + "dataCollectionGroup": 1, + "experimentType": 1, + }, + ) + + transport_manager = TransportManager("PikaTransport") + mock_transport().connect.assert_called_once() + + with mock.patch("murfey.server.ispyb.ISPyBSession", ispyb_db_session): + transport_manager.do_update_data_collection_group( + record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) + ) + + final_dcg_entry = get_or_create_db_entry( + session=ispyb_db_session, + table=DataCollectionGroup, + lookup_kwargs={ + "dataCollectionGroup": 1, + }, + ) + assert final_dcg_entry.experimentTypeId == 2 From 3b02ae60ba1f534d937e6de9176dda8f7400b76e Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 15:04:04 +0000 Subject: [PATCH 13/34] Sort out ids --- 
tests/server/test_ispyb.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index 63550753d..602e4c60c 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -77,8 +77,8 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) session=ispyb_db_session, table=DataCollectionGroup, insert_kwargs={ - "dataCollectionGroup": 1, - "experimentType": 1, + "dataCollectionGroupId": 1, + "experimentTypeId": 1, }, ) @@ -93,8 +93,6 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) final_dcg_entry = get_or_create_db_entry( session=ispyb_db_session, table=DataCollectionGroup, - lookup_kwargs={ - "dataCollectionGroup": 1, - }, + lookup_kwargs={"dataCollectionGroupId": 1}, ) assert final_dcg_entry.experimentTypeId == 2 From 1de4a89c624ac30cc76bcf4a1abac7ba1b7b97ea Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 15:11:20 +0000 Subject: [PATCH 14/34] Try more comprehensive tomo metadata test --- tests/server/test_ispyb.py | 1 + tests/workflows/tomo/test_tomo_metadata.py | 16 ++++++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index 602e4c60c..a22047b7f 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -78,6 +78,7 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) table=DataCollectionGroup, insert_kwargs={ "dataCollectionGroupId": 1, + "sessionId": 0, "experimentTypeId": 1, }, ) diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index 3925a1c95..ae32117f0 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -135,12 +135,16 @@ def test_register_search_map_update_with_all_parameters( assert sm_final_parameters.y_location is not None # Check this would have updated ispyb 
- mock_transport.do_update_search_map.assert_any_call(1, new_parameters) - new_parameters.x_location = sm_final_parameters.x_location - new_parameters.y_location = sm_final_parameters.y_location - new_parameters.height_on_atlas = int(40 * 7.8) - new_parameters.width_on_atlas = int(20 * 7.8) - mock_transport.do_update_search_map.assert_any_call(1, new_parameters) + update_args = mock_transport.do_update_search_map.call_args_list + assert len(update_args) == 2 + assert update_args[0][0] == 1 + assert update_args[1][0] == 1 + assert update_args[0][1].x_location == new_parameters.x_location + assert update_args[0][1].y_location == new_parameters.y_location + assert update_args[1][1].x_location == sm_final_parameters.x_location + assert update_args[1][1].x_location == sm_final_parameters.y_location + assert update_args[1][1].height_on_atlas == int(40 * 7.8) + assert update_args[1][1].width_on_atlas == int(20 * 7.8) @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From b15a4638c3ab4cd53e69f787a11a0412ec3ccdbc Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 15:28:45 +0000 Subject: [PATCH 15/34] Try fixing tests --- tests/server/test_ispyb.py | 17 ++++++++++++++++- tests/workflows/tomo/test_tomo_metadata.py | 16 ++++++++-------- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index a22047b7f..686a96ea3 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -73,12 +73,27 @@ def test_get_all_ongoing_visits(): @mock.patch("workflows.transport.pika_transport.PikaTransport") def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): + # Manually get the BLSession ID for comparison + bl_session_id = ( + ispyb_db_session.execute( + select(BLSession) + .join(Proposal) + .where(BLSession.proposalId == Proposal.proposalId) + .where(BLSession.beamLineName == ExampleVisit.instrument_name) + .where(Proposal.proposalCode == 
ExampleVisit.proposal_code) + .where(Proposal.proposalNumber == str(ExampleVisit.proposal_number)) + .where(BLSession.visit_number == ExampleVisit.visit_number) + ) + .scalar_one() + .sessionId + ) + # Insert data collection group get_or_create_db_entry( session=ispyb_db_session, table=DataCollectionGroup, insert_kwargs={ "dataCollectionGroupId": 1, - "sessionId": 0, + "sessionId": bl_session_id, "experimentTypeId": 1, }, ) diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index ae32117f0..63f36df35 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -137,14 +137,14 @@ def test_register_search_map_update_with_all_parameters( # Check this would have updated ispyb update_args = mock_transport.do_update_search_map.call_args_list assert len(update_args) == 2 - assert update_args[0][0] == 1 - assert update_args[1][0] == 1 - assert update_args[0][1].x_location == new_parameters.x_location - assert update_args[0][1].y_location == new_parameters.y_location - assert update_args[1][1].x_location == sm_final_parameters.x_location - assert update_args[1][1].x_location == sm_final_parameters.y_location - assert update_args[1][1].height_on_atlas == int(40 * 7.8) - assert update_args[1][1].width_on_atlas == int(20 * 7.8) + assert update_args[0][0][0] == 1 + assert update_args[0][1][0] == 1 + assert update_args[0][0][1].x_stage_position == 0.3 + assert update_args[0][0][1].y_stage_position == 0.4 + assert update_args[0][1][1].x_location == sm_final_parameters.x_location + assert update_args[0][1][1].x_location == sm_final_parameters.y_location + assert update_args[0][1][1].height_on_atlas == int(40 * 7.8) + assert update_args[0][1][1].width_on_atlas == int(20 * 7.8) @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From 3a76ca5217d9de8c48ddf43fa5db22ab59fa2473 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 16:17:46 +0000 Subject: [PATCH 
16/34] test transport init --- tests/server/test_ispyb.py | 6 ++++++ tests/workflows/tomo/test_tomo_metadata.py | 11 ++++++----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index 686a96ea3..c23442bb5 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -71,6 +71,12 @@ def test_get_all_ongoing_visits(): pass +@mock.patch("workflows.transport.pika_transport.PikaTransport") +def test_transport_manager_init(mock_transport): + TransportManager("PikaTransport") + mock_transport().connect.assert_called_once() + + @mock.patch("workflows.transport.pika_transport.PikaTransport") def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): # Manually get the BLSession ID for comparison diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index 63f36df35..c8b260d32 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -137,14 +137,15 @@ def test_register_search_map_update_with_all_parameters( # Check this would have updated ispyb update_args = mock_transport.do_update_search_map.call_args_list assert len(update_args) == 2 + print(update_args) assert update_args[0][0][0] == 1 - assert update_args[0][1][0] == 1 + assert update_args[1][0][0] == 1 assert update_args[0][0][1].x_stage_position == 0.3 assert update_args[0][0][1].y_stage_position == 0.4 - assert update_args[0][1][1].x_location == sm_final_parameters.x_location - assert update_args[0][1][1].x_location == sm_final_parameters.y_location - assert update_args[0][1][1].height_on_atlas == int(40 * 7.8) - assert update_args[0][1][1].width_on_atlas == int(20 * 7.8) + assert update_args[1][0][1].x_location == sm_final_parameters.x_location + assert update_args[1][0][1].x_location == sm_final_parameters.y_location + assert update_args[1][0][1].height_on_atlas == int(40 * 7.8) + assert 
update_args[1][0][1].width_on_atlas == int(20 * 7.8) @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From 46012abc507414d72fc9dfc8f2eee24a8d0bcdb8 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 16:28:11 +0000 Subject: [PATCH 17/34] Find ou twha the calls are --- tests/server/test_ispyb.py | 1 + tests/workflows/tomo/test_tomo_metadata.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index c23442bb5..de0289987 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -74,6 +74,7 @@ def test_get_all_ongoing_visits(): @mock.patch("workflows.transport.pika_transport.PikaTransport") def test_transport_manager_init(mock_transport): TransportManager("PikaTransport") + print(mock_transport.mock_calls) mock_transport().connect.assert_called_once() diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index c8b260d32..94d6adb5b 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -137,15 +137,14 @@ def test_register_search_map_update_with_all_parameters( # Check this would have updated ispyb update_args = mock_transport.do_update_search_map.call_args_list assert len(update_args) == 2 - print(update_args) assert update_args[0][0][0] == 1 assert update_args[1][0][0] == 1 assert update_args[0][0][1].x_stage_position == 0.3 assert update_args[0][0][1].y_stage_position == 0.4 assert update_args[1][0][1].x_location == sm_final_parameters.x_location - assert update_args[1][0][1].x_location == sm_final_parameters.y_location - assert update_args[1][0][1].height_on_atlas == int(40 * 7.8) - assert update_args[1][0][1].width_on_atlas == int(20 * 7.8) + assert update_args[1][0][1].y_location == sm_final_parameters.y_location + assert update_args[1][0][1].height_on_atlas == int(4000 * 1e-5 / 1e-7 * 7.8) + assert update_args[1][0][1].width_on_atlas == 
int(2000 * 1e-5 / 1e-7 * 7.8) @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From 4d8d0d75c5a4de542607d0f869b304545985275b Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 16:55:19 +0000 Subject: [PATCH 18/34] Keep trying --- tests/server/test_ispyb.py | 6 ++---- tests/workflows/tomo/test_tomo_metadata.py | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index de0289987..5f2356595 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -73,8 +73,8 @@ def test_get_all_ongoing_visits(): @mock.patch("workflows.transport.pika_transport.PikaTransport") def test_transport_manager_init(mock_transport): - TransportManager("PikaTransport") - print(mock_transport.mock_calls) + transport_manager = TransportManager("PikaTransport") + print(mock_transport.mock_calls, transport_manager.transport) mock_transport().connect.assert_called_once() @@ -106,8 +106,6 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - mock_transport().connect.assert_called_once() - with mock.patch("murfey.server.ispyb.ISPyBSession", ispyb_db_session): transport_manager.do_update_data_collection_group( record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) diff --git a/tests/workflows/tomo/test_tomo_metadata.py b/tests/workflows/tomo/test_tomo_metadata.py index 94d6adb5b..61e94ff2c 100644 --- a/tests/workflows/tomo/test_tomo_metadata.py +++ b/tests/workflows/tomo/test_tomo_metadata.py @@ -143,8 +143,8 @@ def test_register_search_map_update_with_all_parameters( assert update_args[0][0][1].y_stage_position == 0.4 assert update_args[1][0][1].x_location == sm_final_parameters.x_location assert update_args[1][0][1].y_location == sm_final_parameters.y_location - assert update_args[1][0][1].height_on_atlas == int(4000 * 1e-5 / 1e-7 * 7.8) - assert 
update_args[1][0][1].width_on_atlas == int(2000 * 1e-5 / 1e-7 * 7.8) + assert update_args[1][0][1].height_on_atlas == 311 + assert update_args[1][0][1].width_on_atlas == 155 @mock.patch("murfey.workflows.tomo.tomo_metadata._transport_object") From 4f99666fe047beba2ca08abb11f33805f65b4aa2 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 17:07:13 +0000 Subject: [PATCH 19/34] Don't bother iwth the connect --- tests/server/test_ispyb.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index 5f2356595..bfeddb001 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -71,13 +71,6 @@ def test_get_all_ongoing_visits(): pass -@mock.patch("workflows.transport.pika_transport.PikaTransport") -def test_transport_manager_init(mock_transport): - transport_manager = TransportManager("PikaTransport") - print(mock_transport.mock_calls, transport_manager.transport) - mock_transport().connect.assert_called_once() - - @mock.patch("workflows.transport.pika_transport.PikaTransport") def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): # Manually get the BLSession ID for comparison @@ -106,7 +99,7 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - with mock.patch("murfey.server.ispyb.ISPyBSession", ispyb_db_session): + with mock.patch("murfey.server.ispyb.ISPyBSession.__call__", ispyb_db_session): transport_manager.do_update_data_collection_group( record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) ) From dcb94e5625922010a4b7b18c3303d64a99f8f4b0 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Tue, 25 Nov 2025 17:13:28 +0000 Subject: [PATCH 20/34] More layers --- tests/server/test_ispyb.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index bfeddb001..c3731758d 
100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -99,7 +99,9 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - with mock.patch("murfey.server.ispyb.ISPyBSession.__call__", ispyb_db_session): + with mock.patch( + "murfey.server.ispyb.ISPyBSession.__call__.__enter__", ispyb_db_session + ): transport_manager.do_update_data_collection_group( record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) ) From f33c945a0d6dc0270bd6f343fae7f7b998712ec6 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Wed, 26 Nov 2025 09:29:14 +0000 Subject: [PATCH 21/34] Print object to see what it is --- src/murfey/server/ispyb.py | 1 + tests/server/test_ispyb.py | 4 +--- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/murfey/server/ispyb.py b/src/murfey/server/ispyb.py index 47860c891..3649746c7 100644 --- a/src/murfey/server/ispyb.py +++ b/src/murfey/server/ispyb.py @@ -124,6 +124,7 @@ def do_insert_data_collection_group( **kwargs, ): try: + print(ISPyBSession()) with ISPyBSession() as db: db.add(record) db.commit() diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index c3731758d..bfeddb001 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -99,9 +99,7 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - with mock.patch( - "murfey.server.ispyb.ISPyBSession.__call__.__enter__", ispyb_db_session - ): + with mock.patch("murfey.server.ispyb.ISPyBSession.__call__", ispyb_db_session): transport_manager.do_update_data_collection_group( record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) ) From 1a496f89fd7a54502210267247d5e5d8e511d165 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Wed, 26 Nov 2025 09:37:58 +0000 Subject: [PATCH 22/34] Different way of patching --- src/murfey/server/ispyb.py | 1 - 
tests/server/test_ispyb.py | 14 +++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/murfey/server/ispyb.py b/src/murfey/server/ispyb.py index 3649746c7..47860c891 100644 --- a/src/murfey/server/ispyb.py +++ b/src/murfey/server/ispyb.py @@ -124,7 +124,6 @@ def do_insert_data_collection_group( **kwargs, ): try: - print(ISPyBSession()) with ISPyBSession() as db: db.add(record) db.commit() diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index bfeddb001..7352c3588 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -72,7 +72,12 @@ def test_get_all_ongoing_visits(): @mock.patch("workflows.transport.pika_transport.PikaTransport") -def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): +@mock.patch("murfey.server.ispyb.ISPyBSession.__call__") +def test_update_data_collection_group( + mock_ispyb_session_call, mock_transport, ispyb_db_session: Session +): + mock_ispyb_session_call.return_value = ispyb_db_session + # Manually get the BLSession ID for comparison bl_session_id = ( ispyb_db_session.execute( @@ -99,10 +104,9 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - with mock.patch("murfey.server.ispyb.ISPyBSession.__call__", ispyb_db_session): - transport_manager.do_update_data_collection_group( - record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) - ) + transport_manager.do_update_data_collection_group( + record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) + ) final_dcg_entry = get_or_create_db_entry( session=ispyb_db_session, From c654ddd088a5d3b6451caeff5cf478815184c361 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Wed, 26 Nov 2025 09:51:22 +0000 Subject: [PATCH 23/34] Mock without the call method --- tests/server/test_ispyb.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/server/test_ispyb.py 
b/tests/server/test_ispyb.py index 7352c3588..128e4bf2c 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -72,12 +72,7 @@ def test_get_all_ongoing_visits(): @mock.patch("workflows.transport.pika_transport.PikaTransport") -@mock.patch("murfey.server.ispyb.ISPyBSession.__call__") -def test_update_data_collection_group( - mock_ispyb_session_call, mock_transport, ispyb_db_session: Session -): - mock_ispyb_session_call.return_value = ispyb_db_session - +def test_update_data_collection_group(mock_transport, ispyb_db_session: Session): # Manually get the BLSession ID for comparison bl_session_id = ( ispyb_db_session.execute( @@ -104,9 +99,10 @@ def test_update_data_collection_group( ) transport_manager = TransportManager("PikaTransport") - transport_manager.do_update_data_collection_group( - record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) - ) + with mock.patch("murfey.server.ispyb.ISPyBSession", ispyb_db_session): + transport_manager.do_update_data_collection_group( + record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) + ) final_dcg_entry = get_or_create_db_entry( session=ispyb_db_session, From dc35e9f0e59e84f90bb5586d9a94f93489de6654 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Wed, 26 Nov 2025 10:02:13 +0000 Subject: [PATCH 24/34] Mock as return value --- tests/server/test_ispyb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/server/test_ispyb.py b/tests/server/test_ispyb.py index 128e4bf2c..58f2f2bbc 100644 --- a/tests/server/test_ispyb.py +++ b/tests/server/test_ispyb.py @@ -99,7 +99,7 @@ def test_update_data_collection_group(mock_transport, ispyb_db_session: Session) ) transport_manager = TransportManager("PikaTransport") - with mock.patch("murfey.server.ispyb.ISPyBSession", ispyb_db_session): + with mock.patch("murfey.server.ispyb.ISPyBSession", return_value=ispyb_db_session): transport_manager.do_update_data_collection_group( 
record=DataCollectionGroup(dataCollectionGroupId=1, experimentTypeId=2) ) From 2da2b4d4c05a7517f4d8eaa2d9df0255c0078079 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 11:47:59 +0000 Subject: [PATCH 25/34] DCGs are based on atlas jpg name --- src/murfey/client/contexts/atlas.py | 8 ++++---- tests/client/contexts/test_atlas.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/murfey/client/contexts/atlas.py b/src/murfey/client/contexts/atlas.py index c1087c285..5c95da618 100644 --- a/src/murfey/client/contexts/atlas.py +++ b/src/murfey/client/contexts/atlas.py @@ -59,9 +59,9 @@ def post_transfer( source = _get_source(transferred_file, environment) if source: atlas_mrc = transferred_file.with_suffix(".mrc") - transferred_atlas_name = _atlas_destination( + transferred_atlas_jpg = _atlas_destination( environment, source, atlas_mrc, self._token - ) / atlas_mrc.relative_to(source.parent) + ) / atlas_mrc.relative_to(source.parent).with_suffix(".jpg") with open(transferred_file, "rb") as atlas_xml: atlas_xml_data = xmltodict.parse(atlas_xml) @@ -87,7 +87,7 @@ def post_transfer( dcg_data = { "experiment_type_id": 44, # Atlas "tag": str(transferred_file.parent), - "atlas": str(transferred_atlas_name).replace("//", "/"), + "atlas": str(transferred_atlas_jpg).replace("//", "/"), "sample": sample, "atlas_pixel_size": atlas_pixel_size, } @@ -101,5 +101,5 @@ def post_transfer( data=dcg_data, ) logger.info( - f"Registered data collection group for atlas {str(transferred_atlas_name)!r}" + f"Registered data collection group for atlas {str(transferred_atlas_jpg)!r}" ) diff --git a/tests/client/contexts/test_atlas.py b/tests/client/contexts/test_atlas.py index 361c19976..b56f01621 100644 --- a/tests/client/contexts/test_atlas.py +++ b/tests/client/contexts/test_atlas.py @@ -77,7 +77,7 @@ def test_atlas_context_xml(mock_capture_post, tmp_path): dcg_data = { "experiment_type_id": 44, # Atlas "tag": str(atlas_xml.parent), - "atlas": 
f"{tmp_path}/destination/{atlas_xml.relative_to(tmp_path).with_suffix('.mrc')}", + "atlas": f"{tmp_path}/destination/{atlas_xml.relative_to(tmp_path).with_suffix('.jpg')}", "sample": 2, "atlas_pixel_size": atlas_pixel_size * 7.8, } From b6ba908b08d56e225e499005924d88b33dc4dd06 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 15:09:22 +0000 Subject: [PATCH 26/34] Second atlases are possible, and need to avoid experiment type flip-flops --- src/murfey/server/api/workflow.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/src/murfey/server/api/workflow.py b/src/murfey/server/api/workflow.py index 0d2b38086..cd4056628 100644 --- a/src/murfey/server/api/workflow.py +++ b/src/murfey/server/api/workflow.py @@ -106,11 +106,24 @@ def register_dc_group( db.exec(select(Session).where(Session.id == session_id)).one().instrument_name ) logger.info(f"Registering data collection group on microscope {instrument_name}") - if dcg_murfey := db.exec( - select(DataCollectionGroup) - .where(DataCollectionGroup.session_id == session_id) - .where(DataCollectionGroup.tag == dcg_params.tag) - ).all(): + if ( + dcg_murfey := db.exec( + select(DataCollectionGroup) + .where(DataCollectionGroup.session_id == session_id) + .where(DataCollectionGroup.tag == dcg_params.tag) + ).all() + ) or ( + ( + dcg_murfey := db.exec( + select(DataCollectionGroup) + .where(DataCollectionGroup.session_id == session_id) + .where(DataCollectionGroup.sample == dcg_params.sample) + ).all() + ) + and dcg_params.experiment_type_id == 44 + ): + # Either switching atlas for a common (atlas or processing) tag + # Or registering a new atlas-type dcg for a sample that is already present dcg_murfey[0].atlas = dcg_params.atlas or dcg_murfey[0].atlas dcg_murfey[0].sample = dcg_params.sample or dcg_murfey[0].sample dcg_murfey[0].atlas_pixel_size = ( @@ -158,7 +171,7 @@ def register_dc_group( elif dcg_murfey := db.exec( select(DataCollectionGroup) 
.where(DataCollectionGroup.session_id == session_id) - .where(DataCollectionGroup.atlas == dcg_params.atlas) + .where(DataCollectionGroup.sample == dcg_params.sample) ).all(): # Case where we switch from atlas to processing dcg_murfey[0].tag = dcg_params.tag or dcg_murfey[0].tag From 01b12835db6d01dd3efa15bbbf9b647c701e8f77 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 16:03:28 +0000 Subject: [PATCH 27/34] Add some tests for the api dcg insert --- tests/server/api/test_workflow.py | 324 ++++++++++++++++++++++++++++++ 1 file changed, 324 insertions(+) create mode 100644 tests/server/api/test_workflow.py diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py new file mode 100644 index 000000000..aa0753ded --- /dev/null +++ b/tests/server/api/test_workflow.py @@ -0,0 +1,324 @@ +from unittest import mock + +from sqlmodel import Session, select + +from murfey.server.api.workflow import register_dc_group +from murfey.util.db import DataCollectionGroup, SearchMap +from tests.conftest import ExampleVisit + + +@mock.patch("murfey.server.workflow._transport_object") +def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): + """Test the request for a completely new data collection group""" + mock_transport.feedback_queue = "mock_feedback_queue" + + # Request new dcg registration + dcg_params = { + "experiment_type_id": 44, + "tag": "atlas_tag", + "atlas": "/path/to/Atlas_1.jpg", + "sample": 1, + "atlas_pixel_size": 1e-5, + } + register_dc_group( + visit_name="cm12345-6", + session_id=ExampleVisit.murfey_session_id, + dcg_params=dcg_params, + db=murfey_db_session, + ) + + # Check request for registering dcg in ispyb and murfey + mock_transport.send.assert_called_once_with( + "mock_feedback_queue", + { + "register": "data_collection_group", + "start_time": mock.ANY, + "experiment_type_id": dcg_params["experiment_type_id"], + "tag": dcg_params["tag"], + "session_id": ExampleVisit.murfey_session_id, + "atlas": 
dcg_params["atlas"], + "sample": dcg_params["sample"], + "atlas_pixel_size": dcg_params["atlas_pixel_size"], + "microscope": ExampleVisit.instrument_name, + "proposal_code": ExampleVisit.proposal_code, + "proposal_number": ExampleVisit.proposal_number, + "visit_number": ExampleVisit.visit_number, + }, + ) + + +@mock.patch("murfey.server.workflow._transport_object") +def test_register_dc_group_atlas_to_processing( + mock_transport, murfey_db_session: Session +): + """ + Test the request to update an existing data collection group + from atlas type 44 to a processing type with a different tag + """ + mock_transport.feedback_queue = "mock_feedback_queue" + + # Make sure dcg is present + dcg = DataCollectionGroup( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="atlas_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + sample=1, + atlas="/path/to/Atlas_1.jpg", + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Request new dcg registration with processing experiment type and tag + dcg_params = { + "experiment_type_id": 36, + "tag": "processing_tag", + "atlas": "/path/to/Atlas_1.jpg", + "sample": 1, + "atlas_pixel_size": 1e-5, + } + register_dc_group( + visit_name="cm12345-6", + session_id=ExampleVisit.murfey_session_id, + dcg_params=dcg_params, + db=murfey_db_session, + ) + + # Check request to ispyb for updating the experiment type + mock_transport.send.assert_called_once_with( + "mock_feedback_queue", + { + "register": "experiment_type_update", + "experiment_type_id": dcg_params["experiment_type_id"], + "dcgid": dcg.id, + }, + ) + + # Check that the tag of the data collection group was updated + new_dcg = murfey_db_session.exec( + select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + ).one() + assert new_dcg.tag == dcg_params["tag"] + + +@mock.patch("murfey.server.workflow._transport_object") +def test_register_dc_group_processing_to_atlas( + mock_transport, murfey_db_session: Session +): + """ + Test the request to update an 
existing data collection group + of processing type with a new atlas type 44, which should leave the tag unchanged + """ + mock_transport.feedback_queue = "mock_feedback_queue" + + # Make sure dcg is present + dcg = DataCollectionGroup( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="processing_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + sample=1, + atlas="/path/to/Atlas_1.jpg", + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Request new dcg registration with atlas experiment type and tag + dcg_params = { + "experiment_type_id": 44, + "tag": "atlas_tag", + "atlas": "/path/to/Atlas_2.jpg", + "sample": 1, + "atlas_pixel_size": 1e-4, + } + register_dc_group( + visit_name="cm12345-6", + session_id=ExampleVisit.murfey_session_id, + dcg_params=dcg_params, + db=murfey_db_session, + ) + + # Check request to ispyb for updating the experiment type + mock_transport.send.assert_called_once_with( + "mock_feedback_queue", + { + "register": "atlas_update", + "atlas_id": dcg.atlas_id, + "atlas": dcg_params["atlas"], + "sample": dcg_params["sample"], + "atlas_pixel_size": dcg_params["atlas_pixel_size"], + "dcgid": dcg.id, + "session_id": ExampleVisit.murfey_session_id, + }, + ) + + # Check the data collection group atlas was updated + new_dcg = murfey_db_session.exec( + select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + ).one() + assert new_dcg.atlas == dcg_params["atlas"] + assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] + # Check the tag of the data collection group was not updated + assert new_dcg.tag != dcg_params["tag"] + + +@mock.patch("murfey.server.workflow._transport_object") +def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session): + """ + Test the request to update an existing data collection group + by adding an atlas, using the same tag + """ + mock_transport.feedback_queue = "mock_feedback_queue" + mock_transport.do_insert_atlas.return_value = 5 + + # Make sure dcg is 
present without an atlas id + dcg = DataCollectionGroup( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="processing_tag", + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Request new dcg registration with atlas and exisiting tag + dcg_params = { + "experiment_type_id": 36, + "tag": "processing_tag", + "atlas": "/path/to/Atlas_2.jpg", + "sample": 1, + "atlas_pixel_size": 1e-4, + } + register_dc_group( + visit_name="cm12345-6", + session_id=ExampleVisit.murfey_session_id, + dcg_params=dcg_params, + db=murfey_db_session, + ) + + # Check no sends are made by the transport object + mock_transport.send.assert_not_called() + + # Check the call to insert the atlas into ispyb + atlas_args = mock_transport.do_insert_atlas.call_args_list + assert len(atlas_args) == 1 + assert atlas_args[0].dataCollectionGroupId == dcg.id + assert atlas_args[0].atlasImage == dcg_params["atlas"] + assert atlas_args[0].pixelSize == dcg_params["atlas_pixel_size"] + assert atlas_args[0].cassetteSlot == dcg_params["sample"] + + # Check the data collection group atlas was updated + new_dcg = murfey_db_session.exec( + select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + ).one() + assert new_dcg.atlas == dcg_params["atlas"] + assert new_dcg.sample == dcg_params["sample"] + assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] + assert new_dcg.tag == dcg_params["tag"] + assert new_dcg.atlas_id == 5 + + +@mock.patch("murfey.server.workflow.register_search_map_in_database") +@mock.patch("murfey.server.workflow._transport_object") +def test_register_dc_group_new_atlas_with_searchmaps( + mock_register_search_map, mock_transport, murfey_db_session: Session +): + """ + Test the request to update an existing data collection group + by adding an atlas, using the same tag, and also update search maps + """ + mock_transport.feedback_queue = "mock_feedback_queue" + + # Make sure dcg is present with an atlas id + dcg = DataCollectionGroup( + id=1, + 
session_id=ExampleVisit.murfey_session_id, + tag="processing_tag", + atlas_id=90, + atlas_pixel_size=1e-5, + sample=1, + atlas="/path/to/Atlas_1.jpg", + ) + murfey_db_session.add(dcg) + murfey_db_session.commit() + + # Add some search maps with the dcg tag and one with a different tag + sm1 = SearchMap( + id=1, + session_id=ExampleVisit.murfey_session_id, + tag="processing_tag", + name="searchmap1", + ) + sm2 = SearchMap( + id=2, + session_id=ExampleVisit.murfey_session_id, + tag="processing_tag", + name="searchmap2", + ) + sm3 = SearchMap( + id=3, + session_id=ExampleVisit.murfey_session_id, + tag="different_tag", + name="searchmap3", + ) + murfey_db_session.add(sm1) + murfey_db_session.add(sm2) + murfey_db_session.add(sm3) + murfey_db_session.commit() + + # Request new dcg registration with new atlas tag and sample + dcg_params = { + "experiment_type_id": 37, + "tag": "processing_tag", + "atlas": "/path/to/Atlas_2.jpg", + "sample": 2, + "atlas_pixel_size": 1e-4, + } + register_dc_group( + visit_name="cm12345-6", + session_id=ExampleVisit.murfey_session_id, + dcg_params=dcg_params, + db=murfey_db_session, + ) + + # Check request to ispyb for updating the experiment type + mock_transport.send.assert_called_once_with( + "mock_feedback_queue", + { + "register": "atlas_update", + "atlas_id": dcg.atlas_id, + "atlas": dcg_params["atlas"], + "sample": dcg_params["sample"], + "atlas_pixel_size": dcg_params["atlas_pixel_size"], + "dcgid": dcg.id, + "session_id": ExampleVisit.murfey_session_id, + }, + ) + + # Check the data collection group atlas was updated + new_dcg = murfey_db_session.exec( + select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + ).one() + assert new_dcg.atlas == dcg_params["atlas"] + assert new_dcg.sample == dcg_params["sample"] + assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] + assert new_dcg.tag == dcg_params["tag"] + assert new_dcg.atlas_id == dcg.atlas_id + + # Check the search map update calls + assert 
mock_register_search_map.call_count == 2 + mock_register_search_map.assert_any_call( + ExampleVisit.murfey_session_id, + "searchmap1", + mock.ANY, + murfey_db_session, + close_db=False, + ) + mock_register_search_map.assert_any_call( + ExampleVisit.murfey_session_id, + "searchmap2", + mock.ANY, + murfey_db_session, + close_db=False, + ) From f8d7790368385032a9c9d21988e0ede138b8af43 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 16:23:48 +0000 Subject: [PATCH 28/34] Mokced the wrong thing --- tests/server/api/test_workflow.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index aa0753ded..a4414ffbe 100644 --- a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -7,7 +7,7 @@ from tests.conftest import ExampleVisit -@mock.patch("murfey.server.workflow._transport_object") +@mock.patch("murfey.server.api.workflow._transport_object") def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): """Test the request for a completely new data collection group""" mock_transport.feedback_queue = "mock_feedback_queue" @@ -47,7 +47,7 @@ def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): ) -@mock.patch("murfey.server.workflow._transport_object") +@mock.patch("murfey.server.api.workflow._transport_object") def test_register_dc_group_atlas_to_processing( mock_transport, murfey_db_session: Session ): @@ -102,7 +102,7 @@ def test_register_dc_group_atlas_to_processing( assert new_dcg.tag == dcg_params["tag"] -@mock.patch("murfey.server.workflow._transport_object") +@mock.patch("murfey.server.api.workflow._transport_object") def test_register_dc_group_processing_to_atlas( mock_transport, murfey_db_session: Session ): @@ -164,7 +164,7 @@ def test_register_dc_group_processing_to_atlas( assert new_dcg.tag != dcg_params["tag"] -@mock.patch("murfey.server.workflow._transport_object") 
+@mock.patch("murfey.server.api.workflow._transport_object") def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session): """ Test the request to update an existing data collection group @@ -219,8 +219,8 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) assert new_dcg.atlas_id == 5 -@mock.patch("murfey.server.workflow.register_search_map_in_database") -@mock.patch("murfey.server.workflow._transport_object") +@mock.patch("murfey.server.api.workflow.register_search_map_in_database") +@mock.patch("murfey.server.api.workflow._transport_object") def test_register_dc_group_new_atlas_with_searchmaps( mock_register_search_map, mock_transport, murfey_db_session: Session ): From 8d957af0dddcbbd6335ccc2373d6691862087634 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 16:34:48 +0000 Subject: [PATCH 29/34] Fix type of the dcgparams --- tests/server/api/test_workflow.py | 150 +++++++++++++++--------------- 1 file changed, 75 insertions(+), 75 deletions(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index a4414ffbe..a7c1cd71d 100644 --- a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -2,7 +2,7 @@ from sqlmodel import Session, select -from murfey.server.api.workflow import register_dc_group +from murfey.server.api.workflow import DCGroupParameters, register_dc_group from murfey.util.db import DataCollectionGroup, SearchMap from tests.conftest import ExampleVisit @@ -13,13 +13,13 @@ def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): mock_transport.feedback_queue = "mock_feedback_queue" # Request new dcg registration - dcg_params = { - "experiment_type_id": 44, - "tag": "atlas_tag", - "atlas": "/path/to/Atlas_1.jpg", - "sample": 1, - "atlas_pixel_size": 1e-5, - } + dcg_params = DCGroupParameters( + experiment_type_id=44, + tag="atlas_tag", + atlas="/path/to/Atlas_1.jpg", + sample=10, + atlas_pixel_size=1e-5, + ) 
register_dc_group( visit_name="cm12345-6", session_id=ExampleVisit.murfey_session_id, @@ -33,12 +33,12 @@ def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): { "register": "data_collection_group", "start_time": mock.ANY, - "experiment_type_id": dcg_params["experiment_type_id"], - "tag": dcg_params["tag"], + "experiment_type_id": 44, + "tag": "atlas_tag", "session_id": ExampleVisit.murfey_session_id, - "atlas": dcg_params["atlas"], - "sample": dcg_params["sample"], - "atlas_pixel_size": dcg_params["atlas_pixel_size"], + "atlas": "/path/to/Atlas_1.jpg", + "sample": 10, + "atlas_pixel_size": 1e-5, "microscope": ExampleVisit.instrument_name, "proposal_code": ExampleVisit.proposal_code, "proposal_number": ExampleVisit.proposal_number, @@ -64,20 +64,20 @@ def test_register_dc_group_atlas_to_processing( tag="atlas_tag", atlas_id=90, atlas_pixel_size=1e-5, - sample=1, + sample=10, atlas="/path/to/Atlas_1.jpg", ) murfey_db_session.add(dcg) murfey_db_session.commit() # Request new dcg registration with processing experiment type and tag - dcg_params = { - "experiment_type_id": 36, - "tag": "processing_tag", - "atlas": "/path/to/Atlas_1.jpg", - "sample": 1, - "atlas_pixel_size": 1e-5, - } + dcg_params = DCGroupParameters( + experiment_type_id=36, + tag="processing_tag", + atlas="/path/to/Atlas_1.jpg", + sample=10, + atlas_pixel_size=1e-5, + ) register_dc_group( visit_name="cm12345-6", session_id=ExampleVisit.murfey_session_id, @@ -90,16 +90,16 @@ def test_register_dc_group_atlas_to_processing( "mock_feedback_queue", { "register": "experiment_type_update", - "experiment_type_id": dcg_params["experiment_type_id"], - "dcgid": dcg.id, + "experiment_type_id": 36, + "dcgid": 1, }, ) # Check that the tag of the data collection group was updated new_dcg = murfey_db_session.exec( - select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + select(DataCollectionGroup).where(DataCollectionGroup.id == 1) ).one() - assert new_dcg.tag == 
dcg_params["tag"] + assert new_dcg.tag == "preprocessing_tag" @mock.patch("murfey.server.api.workflow._transport_object") @@ -126,13 +126,13 @@ def test_register_dc_group_processing_to_atlas( murfey_db_session.commit() # Request new dcg registration with atlas experiment type and tag - dcg_params = { - "experiment_type_id": 44, - "tag": "atlas_tag", - "atlas": "/path/to/Atlas_2.jpg", - "sample": 1, - "atlas_pixel_size": 1e-4, - } + dcg_params = DCGroupParameters( + experiment_type_id=44, + tag="atlas_tag", + atlas="/path/to/Atlas_2.jpg", + sample=10, + atlas_pixel_size=1e-4, + ) register_dc_group( visit_name="cm12345-6", session_id=ExampleVisit.murfey_session_id, @@ -145,23 +145,23 @@ def test_register_dc_group_processing_to_atlas( "mock_feedback_queue", { "register": "atlas_update", - "atlas_id": dcg.atlas_id, - "atlas": dcg_params["atlas"], - "sample": dcg_params["sample"], - "atlas_pixel_size": dcg_params["atlas_pixel_size"], - "dcgid": dcg.id, + "atlas_id": 90, + "atlas": "/path/to/Atlas_2.jpg", + "sample": 10, + "atlas_pixel_size": 1e-4, + "dcgid": 1, "session_id": ExampleVisit.murfey_session_id, }, ) # Check the data collection group atlas was updated new_dcg = murfey_db_session.exec( - select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + select(DataCollectionGroup).where(DataCollectionGroup.id == 1) ).one() - assert new_dcg.atlas == dcg_params["atlas"] - assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] + assert new_dcg.atlas == "/path/to/Atlas_2.jpg" + assert new_dcg.atlas_pixel_size == 1e-4 # Check the tag of the data collection group was not updated - assert new_dcg.tag != dcg_params["tag"] + assert new_dcg.tag != "atlas_tag" @mock.patch("murfey.server.api.workflow._transport_object") @@ -183,13 +183,13 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) murfey_db_session.commit() # Request new dcg registration with atlas and exisiting tag - dcg_params = { - "experiment_type_id": 36, - 
"tag": "processing_tag", - "atlas": "/path/to/Atlas_2.jpg", - "sample": 1, - "atlas_pixel_size": 1e-4, - } + dcg_params = DCGroupParameters( + experiment_type_id=36, + tag="processing_tag", + atlas="/path/to/Atlas_2.jpg", + sample=10, + atlas_pixel_size=1e-4, + ) register_dc_group( visit_name="cm12345-6", session_id=ExampleVisit.murfey_session_id, @@ -203,19 +203,19 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) # Check the call to insert the atlas into ispyb atlas_args = mock_transport.do_insert_atlas.call_args_list assert len(atlas_args) == 1 - assert atlas_args[0].dataCollectionGroupId == dcg.id - assert atlas_args[0].atlasImage == dcg_params["atlas"] - assert atlas_args[0].pixelSize == dcg_params["atlas_pixel_size"] - assert atlas_args[0].cassetteSlot == dcg_params["sample"] + assert atlas_args[0].dataCollectionGroupId == 1 + assert atlas_args[0].atlasImage == "/path/to/Atlas_2.jpg" + assert atlas_args[0].pixelSize == 1e-4 + assert atlas_args[0].cassetteSlot == 10 # Check the data collection group atlas was updated new_dcg = murfey_db_session.exec( - select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) + select(DataCollectionGroup).where(DataCollectionGroup.id == 1) ).one() - assert new_dcg.atlas == dcg_params["atlas"] - assert new_dcg.sample == dcg_params["sample"] - assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] - assert new_dcg.tag == dcg_params["tag"] + assert new_dcg.atlas == "/path/to/Atlas_2.jpg" + assert new_dcg.sample == 10 + assert new_dcg.atlas_pixel_size == 1e-4 + assert new_dcg.tag == "atlas_tag" assert new_dcg.atlas_id == 5 @@ -237,7 +237,7 @@ def test_register_dc_group_new_atlas_with_searchmaps( tag="processing_tag", atlas_id=90, atlas_pixel_size=1e-5, - sample=1, + sample=10, atlas="/path/to/Atlas_1.jpg", ) murfey_db_session.add(dcg) @@ -268,13 +268,13 @@ def test_register_dc_group_new_atlas_with_searchmaps( murfey_db_session.commit() # Request new dcg registration with new 
atlas tag and sample - dcg_params = { - "experiment_type_id": 37, - "tag": "processing_tag", - "atlas": "/path/to/Atlas_2.jpg", - "sample": 2, - "atlas_pixel_size": 1e-4, - } + dcg_params = DCGroupParameters( + experiment_type_id=37, + tag="processing_tag", + atlas="/path/to/Atlas_2.jpg", + sample=12, + atlas_pixel_size=1e-4, + ) register_dc_group( visit_name="cm12345-6", session_id=ExampleVisit.murfey_session_id, @@ -287,11 +287,11 @@ def test_register_dc_group_new_atlas_with_searchmaps( "mock_feedback_queue", { "register": "atlas_update", - "atlas_id": dcg.atlas_id, - "atlas": dcg_params["atlas"], - "sample": dcg_params["sample"], - "atlas_pixel_size": dcg_params["atlas_pixel_size"], - "dcgid": dcg.id, + "atlas_id": 90, + "atlas": "/path/to/Atlas_2.jpg", + "sample": 12, + "atlas_pixel_size": 1e-4, + "dcgid": 1, "session_id": ExampleVisit.murfey_session_id, }, ) @@ -300,11 +300,11 @@ def test_register_dc_group_new_atlas_with_searchmaps( new_dcg = murfey_db_session.exec( select(DataCollectionGroup).where(DataCollectionGroup.id == dcg.id) ).one() - assert new_dcg.atlas == dcg_params["atlas"] - assert new_dcg.sample == dcg_params["sample"] - assert new_dcg.atlas_pixel_size == dcg_params["atlas_pixel_size"] - assert new_dcg.tag == dcg_params["tag"] - assert new_dcg.atlas_id == dcg.atlas_id + assert new_dcg.atlas == "/path/to/Atlas_2.jpg" + assert new_dcg.sample == 12 + assert new_dcg.atlas_pixel_size == 1e-4 + assert new_dcg.tag == "processing_tag" + assert new_dcg.atlas_id == 90 # Check the search map update calls assert mock_register_search_map.call_count == 2 From 04bdc2719b18570a05f921c73d2611948905d9b3 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 16:48:39 +0000 Subject: [PATCH 30/34] Minor messups in the tests --- tests/server/api/test_workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index a7c1cd71d..05e31dd2d 100644 --- 
a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -39,7 +39,7 @@ def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): "atlas": "/path/to/Atlas_1.jpg", "sample": 10, "atlas_pixel_size": 1e-5, - "microscope": ExampleVisit.instrument_name, + "microscope": "", "proposal_code": ExampleVisit.proposal_code, "proposal_number": ExampleVisit.proposal_number, "visit_number": ExampleVisit.visit_number, @@ -99,7 +99,7 @@ def test_register_dc_group_atlas_to_processing( new_dcg = murfey_db_session.exec( select(DataCollectionGroup).where(DataCollectionGroup.id == 1) ).one() - assert new_dcg.tag == "preprocessing_tag" + assert new_dcg.tag == "processing_tag" @mock.patch("murfey.server.api.workflow._transport_object") @@ -119,7 +119,7 @@ def test_register_dc_group_processing_to_atlas( tag="processing_tag", atlas_id=90, atlas_pixel_size=1e-5, - sample=1, + sample=10, atlas="/path/to/Atlas_1.jpg", ) murfey_db_session.add(dcg) @@ -171,7 +171,7 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) by adding an atlas, using the same tag """ mock_transport.feedback_queue = "mock_feedback_queue" - mock_transport.do_insert_atlas.return_value = 5 + mock_transport.do_insert_atlas.return_value = {"return_value": 5} # Make sure dcg is present without an atlas id dcg = DataCollectionGroup( From 9df629dd3999b55b27e71ba86d41c42738e4c033 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 16:55:14 +0000 Subject: [PATCH 31/34] mocked wrong way around --- tests/server/api/test_workflow.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index 05e31dd2d..5cd99c856 100644 --- a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -41,8 +41,8 @@ def test_register_dc_group_new_dcg(mock_transport, murfey_db_session: Session): "atlas_pixel_size": 1e-5, "microscope": "", "proposal_code": 
ExampleVisit.proposal_code, - "proposal_number": ExampleVisit.proposal_number, - "visit_number": ExampleVisit.visit_number, + "proposal_number": str(ExampleVisit.proposal_number), + "visit_number": str(ExampleVisit.visit_number), }, ) @@ -203,10 +203,10 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) # Check the call to insert the atlas into ispyb atlas_args = mock_transport.do_insert_atlas.call_args_list assert len(atlas_args) == 1 - assert atlas_args[0].dataCollectionGroupId == 1 - assert atlas_args[0].atlasImage == "/path/to/Atlas_2.jpg" - assert atlas_args[0].pixelSize == 1e-4 - assert atlas_args[0].cassetteSlot == 10 + assert atlas_args[0][0].dataCollectionGroupId == 1 + assert atlas_args[0][0].atlasImage == "/path/to/Atlas_2.jpg" + assert atlas_args[0][0].pixelSize == 1e-4 + assert atlas_args[0][0].cassetteSlot == 10 # Check the data collection group atlas was updated new_dcg = murfey_db_session.exec( @@ -219,8 +219,8 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) assert new_dcg.atlas_id == 5 -@mock.patch("murfey.server.api.workflow.register_search_map_in_database") @mock.patch("murfey.server.api.workflow._transport_object") +@mock.patch("murfey.server.api.workflow.register_search_map_in_database") def test_register_dc_group_new_atlas_with_searchmaps( mock_register_search_map, mock_transport, murfey_db_session: Session ): From 97f9ff0844fdceb7e41db0100d8e9b2c90ac1feb Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 17:09:41 +0000 Subject: [PATCH 32/34] Yet another tuple layer --- tests/server/api/test_workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index 5cd99c856..e38fcc7f4 100644 --- a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -203,10 +203,10 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) # 
Check the call to insert the atlas into ispyb atlas_args = mock_transport.do_insert_atlas.call_args_list assert len(atlas_args) == 1 - assert atlas_args[0][0].dataCollectionGroupId == 1 - assert atlas_args[0][0].atlasImage == "/path/to/Atlas_2.jpg" - assert atlas_args[0][0].pixelSize == 1e-4 - assert atlas_args[0][0].cassetteSlot == 10 + assert atlas_args[0][0][0].dataCollectionGroupId == 1 + assert atlas_args[0][0][0].atlasImage == "/path/to/Atlas_2.jpg" + assert atlas_args[0][0][0].pixelSize == 1e-4 + assert atlas_args[0][0][0].cassetteSlot == 10 # Check the data collection group atlas was updated new_dcg = murfey_db_session.exec( From 608f9ad74002c63dd518b3b9c6b551cacf8df678 Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Thu, 27 Nov 2025 17:13:30 +0000 Subject: [PATCH 33/34] Wrong tag --- tests/server/api/test_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/server/api/test_workflow.py b/tests/server/api/test_workflow.py index e38fcc7f4..01ebffe6b 100644 --- a/tests/server/api/test_workflow.py +++ b/tests/server/api/test_workflow.py @@ -215,7 +215,7 @@ def test_register_dc_group_new_atlas(mock_transport, murfey_db_session: Session) assert new_dcg.atlas == "/path/to/Atlas_2.jpg" assert new_dcg.sample == 10 assert new_dcg.atlas_pixel_size == 1e-4 - assert new_dcg.tag == "atlas_tag" + assert new_dcg.tag == "processing_tag" assert new_dcg.atlas_id == 5 From 796fb7124848fab738717dbd04cbf160f6a3987d Mon Sep 17 00:00:00 2001 From: yxd92326 Date: Mon, 1 Dec 2025 14:39:24 +0000 Subject: [PATCH 34/34] Rename hook --- src/murfey/client/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/murfey/client/context.py b/src/murfey/client/context.py index 36d4c9bca..57cc0bb3d 100644 --- a/src/murfey/client/context.py +++ b/src/murfey/client/context.py @@ -51,7 +51,7 @@ def ensure_dcg_exists( session_file = metadata_source / "EpuSession.dm" for h in entry_points(group="murfey.hooks"): try: - if h.name == 
"get_epu_session": + if h.name == "get_epu_session_metadata": h.load()(session_file, environment=environment) except Exception as e: logger.warning(f"Get EPU session hook failed: {e}")