6 changes: 2 additions & 4 deletions src/murfey/cli/transfer.py
@@ -36,10 +36,8 @@
murfey_url = urlparse(args.server, allow_fragments=False)

machine_data = requests.get(f"{murfey_url.geturl()}/machine").json()
if Path(args.source or ".").resolve() in machine_data.data_directories.keys():
console.print(
f"[red]Source directory is the base directory for the {machine_data.data_directories[Path(args.source or '.').resolve()]}, exiting"
)
if Path(args.source or ".").resolve() in machine_data.data_directories:
console.print("[red]Source directory is the base directory, exiting")

Codecov warning (src/murfey/cli/transfer.py#L40): added line not covered by tests
return

cmd = [
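The simplified check added here reads the same whether `data_directories` is the old mapping of directory to role or, as the rest of this PR suggests (see the `Dict[Path, str]` to `List[Path]` change in `contexts/spa.py` below), a plain list of directories; only the per-directory role lookup in the old error message is lost. A minimal sketch with invented paths and shapes, not Murfey's actual machine config models:

```python
from pathlib import Path

# Hypothetical shapes only
old_style = {Path("/dls/m02/data"): "detector"}  # directory -> role mapping
new_style = [Path("/dls/m02/data")]              # plain list of directories

source = Path("/dls/m02/data").resolve()
assert source in old_style  # dict membership tests the keys
assert source in new_style  # list membership tests the entries
```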
7 changes: 0 additions & 7 deletions src/murfey/client/__init__.py
@@ -215,12 +215,6 @@ def run():
default=False,
help="Remove source files immediately after their transfer",
)
parser.add_argument(
"--relax",
action="store_true",
default=False,
help="Relax the condition that the source directory needs to be recognised from the configuration",
)
parser.add_argument(
"--name",
type=str,
@@ -344,7 +338,6 @@ def run():
gain_ref=gain_ref,
redirected_logger=rich_handler,
force_mdoc_metadata=not args.ignore_mdoc_metadata,
strict=not args.relax,
processing_enabled=machine_data.get("processing_enabled", True),
skip_existing_processing=args.skip_existing_processing,
)
53 changes: 9 additions & 44 deletions src/murfey/client/analyser.py
@@ -57,7 +57,6 @@
self._limited = limited
self._experiment_type = ""
self._acquisition_software = ""
self._role = ""
self._extension: str = ""
self._unseen_xml: list = []
self._context: Context | None = None
@@ -126,14 +125,12 @@
in the Context classes themselves.
"""
if "atlas" in file_path.parts:
self._role = "detector"
self._context = SPAMetadataContext("epu", self._basepath)
return True

# CLEM workflow checks
# Look for LIF and XLIF files
if file_path.suffix in (".lif", ".xlif"):
self._role = "detector"
self._context = CLEMContext("leica", self._basepath)
return True
# Look for TIFF files associated with CLEM workflow
Expand All @@ -152,7 +149,6 @@
).get("analyse_created_directories", [])
)
if created_directories.intersection(set(file_path.parts)):
self._role = "detector"
self._context = CLEMContext("leica", self._basepath)
return True

@@ -181,9 +177,6 @@
else SPAContext("epu", self._basepath)
)
self.parameters_model = ProcessingParametersSPA
# Assign it the detector attribute if not already present
if not self._role:
self._role = "detector"
return True

# Files starting with "Position" belong to the standard tomography workflow
@@ -198,23 +191,6 @@
logger.info("Acquisition software: tomo")
self._context = TomographyContext("tomo", self._basepath)
self.parameters_model = PreprocessingParametersTomo
# Assign role if not already present
if not self._role:
# Fractions files attributed to the detector
if (
"Fractions" in split_file_name[-1]
or "fractions" in split_file_name[-1]
):
self._role = "detector"
# MDOC files attributed to the microscope
elif (
file_path.suffix == ".mdoc"
or file_path.with_suffix(".mdoc").is_file()
):
self._role = "microscope"
# Attribute all other files to the detector
else:
self._role = "detector"
return True

# Files with these suffixes belong to the serial EM tomography workflow
@@ -239,19 +215,14 @@
return False
self._context = TomographyContext("serialem", self._basepath)
self.parameters_model = PreprocessingParametersTomo
if not self._role:
if "Frames" in file_path.parts:
self._role = "detector"
else:
self._role = "microscope"
return True
return False

def post_transfer(self, transferred_file: Path):
try:
if self._context:
self._context.post_transfer(
transferred_file, role=self._role, environment=self._environment
transferred_file, environment=self._environment
)
except Exception as e:
logger.error(f"An exception was encountered post transfer: {e}")
@@ -309,19 +280,17 @@
self.queue.task_done()
continue
elif self._extension:
logger.info(f"Context found successfully: {self._role}")
logger.info(
f"Context found successfully for {transferred_file}"
)
try:
self._context.post_first_transfer(
transferred_file,
role=self._role,
environment=self._environment,
)
except Exception as e:
logger.error(f"Exception encountered: {e}")
if (
self._role == "detector"
and "atlas" not in transferred_file.parts
):
if "atlas" not in transferred_file.parts:
if not dc_metadata:
try:
dc_metadata = self._context.gather_metadata(
@@ -377,20 +346,16 @@
self._find_extension(transferred_file)
if self._extension:
logger.info(
f"Context found successfully: {self._role}, {transferred_file}"
f"Extension found successfully for {transferred_file}"
)
try:
self._context.post_first_transfer(
transferred_file,
role=self._role,
environment=self._environment,
)
except Exception as e:
logger.error(f"Exception encountered: {e}")
if (
self._role == "detector"
and "atlas" not in transferred_file.parts
):
if "atlas" not in transferred_file.parts:
if not dc_metadata:
try:
dc_metadata = self._context.gather_metadata(
@@ -443,8 +408,8 @@
if not self._environment:
return data_file.with_suffix(".xml")
file_name = f"{'_'.join(p for p in data_file.stem.split('_')[:-1])}.xml"
data_directories = self._murfey_config.get("data_directories", {})
for dd in data_directories.keys():
data_directories = self._murfey_config.get("data_directories", [])

Codecov warning (src/murfey/client/analyser.py#L411): added line not covered by tests
for dd in data_directories:
if str(data_file).startswith(dd):
base_dir = Path(dd)
mid_dir = data_file.relative_to(dd).parent
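With `data_directories` now a flat list, the metadata lookup above boils down to finding which configured base directory contains the transferred file and rebuilding the XML path relative to it. A self-contained sketch of just the lookup step; the helper name and example paths are invented:

```python
from pathlib import Path
from typing import Optional, Sequence

def find_base_directory(data_file: Path, data_directories: Sequence[str]) -> Optional[Path]:
    """Return the first configured base directory that contains data_file, if any."""
    for dd in data_directories:
        if str(data_file).startswith(dd):
            return Path(dd)
    return None

# Hypothetical usage; in the client the list would come from the machine configuration
base = find_base_directory(
    Path("/dls/m02/data/2024/cm12345-1/GridSquare_1/FoilHole_1_fractions.tiff"),
    ["/dls/m02/data"],
)
assert base == Path("/dls/m02/data")
```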
18 changes: 14 additions & 4 deletions src/murfey/client/context.py
@@ -40,14 +40,24 @@
self._acquisition_software = acquisition_software
self.name = name

def post_transfer(self, transferred_file: Path, role: str = "", **kwargs):
def post_transfer(
self,
transferred_file: Path,
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
):
# Search external packages for additional hooks to include in Murfey
for h in entry_points(group="murfey.post_transfer_hooks"):
if h.name == self.name:
h.load()(transferred_file, role=role, **kwargs)
h.load()(transferred_file, environment=environment, **kwargs)

Codecov warning (src/murfey/client/context.py#L52): added line not covered by tests

def post_first_transfer(self, transferred_file: Path, role: str = "", **kwargs):
self.post_transfer(transferred_file, role=role, **kwargs)
def post_first_transfer(
self,
transferred_file: Path,
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
):
self.post_transfer(transferred_file, environment=environment, **kwargs)

def gather_metadata(
self, metadata_file: Path, environment: MurfeyInstanceEnvironment | None = None
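Since the base `post_transfer` now forwards `environment=` rather than `role=` to anything registered under the `murfey.post_transfer_hooks` entry-point group, external hooks have to accept the new keyword. A hedged sketch of such a hook; the package, module, and function names are invented, while the entry-point group name and the keyword argument come from the diff above:

```python
# my_package/hooks.py -- hypothetical external package providing a Murfey hook
from pathlib import Path

def on_post_transfer(transferred_file: Path, environment=None, **kwargs) -> None:
    # Murfey now passes the MurfeyInstanceEnvironment (or None) instead of a
    # role string, so any role-based branching in existing hooks has to be
    # dropped or derived from the environment/configuration instead.
    print(f"post-transfer hook saw {transferred_file} (environment={environment})")
```

Such a hook would be registered in the external package's metadata under the `murfey.post_transfer_hooks` entry-point group, with an entry-point name matching the context's `name` attribute, since dispatch is on `h.name == self.name`.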
5 changes: 1 addition & 4 deletions src/murfey/client/contexts/clem.py
@@ -101,14 +101,11 @@
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: Optional[MurfeyInstanceEnvironment] = None,
**kwargs,
) -> bool:

super().post_transfer(
transferred_file, role=role, environment=environment, **kwargs
)
super().post_transfer(transferred_file, environment=environment, **kwargs)

Codecov warning (src/murfey/client/contexts/clem.py#L108): added line not covered by tests

# Process files generated by "auto-save" acquisition mode
# These include TIF/TIFF and XLIF files
5 changes: 1 addition & 4 deletions src/murfey/client/contexts/fib.py
@@ -44,13 +44,10 @@
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
):
super().post_transfer(
transferred_file, role=role, environment=environment, **kwargs
)
super().post_transfer(transferred_file, environment=environment, **kwargs)

Codecov warning (src/murfey/client/contexts/fib.py#L50): added line not covered by tests
if self._acquisition_software == "autotem":
parts = transferred_file.parts
if "DCImages" in parts and transferred_file.suffix == ".png":
11 changes: 4 additions & 7 deletions src/murfey/client/contexts/spa.py
@@ -136,9 +136,9 @@ def _foil_hole_from_file(f: Path) -> int:


def _grid_square_metadata_file(
f: Path, data_directories: Dict[Path, str], visit: str, grid_square: int
f: Path, data_directories: List[Path], visit: str, grid_square: int
) -> Path:
for dd in data_directories.keys():
for dd in data_directories:
if str(f).startswith(str(dd)):
base_dir = dd
mid_dir = f.relative_to(dd).parent
@@ -542,7 +542,7 @@ def _position_analysis(
grid_square = _grid_square_from_file(transferred_file)
grid_square_metadata_file = _grid_square_metadata_file(
transferred_file,
{Path(d): l for d, l in machine_config["data_directories"].items()},
machine_config["data_directories"],
environment.visit,
grid_square,
)
@@ -669,18 +669,16 @@ def _position_analysis(
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
) -> bool:
super().post_transfer(
transferred_file=transferred_file,
role=role,
environment=environment,
**kwargs,
)
data_suffixes = (".mrc", ".tiff", ".tif", ".eer")
if role == "detector" and "gain" not in transferred_file.name:
if "gain" not in transferred_file.name:
if transferred_file.suffix in data_suffixes:
if self._acquisition_software == "epu":
if environment:
@@ -856,7 +854,6 @@ def _register_data_collection(
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
) -> bool:
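With the `role == "detector"` condition gone, `post_transfer` in the SPA context here (and in the tomography context below) gates processing on the file alone. A simplified sketch of the remaining condition; the suffix tuple is copied from the diff, the helper name is invented, and the EPU/environment handling is elided:

```python
from pathlib import Path

DATA_SUFFIXES = (".mrc", ".tiff", ".tif", ".eer")

def looks_like_movie_data(transferred_file: Path) -> bool:
    # Previously this also required the analyser to have tagged the source as
    # the "detector"; only the gain-reference and suffix checks remain.
    return (
        "gain" not in transferred_file.name
        and transferred_file.suffix in DATA_SUFFIXES
    )

assert looks_like_movie_data(Path("FoilHole_123_fractions.tiff"))
assert not looks_like_movie_data(Path("gain_ref.mrc"))
```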
2 changes: 0 additions & 2 deletions src/murfey/client/contexts/spa_metadata.py
@@ -43,13 +43,11 @@ def __init__(self, acquisition_software: str, basepath: Path):
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: Optional[MurfeyInstanceEnvironment] = None,
**kwargs,
):
super().post_transfer(
transferred_file=transferred_file,
role=role,
environment=environment,
**kwargs,
)
10 changes: 3 additions & 7 deletions src/murfey/client/contexts/tomo.py
@@ -549,20 +549,19 @@ def _extract_tilt_series(p: Path) -> str:
def post_transfer(
self,
transferred_file: Path,
role: str = "",
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
) -> List[str]:
super().post_transfer(
transferred_file=transferred_file,
role=role,
environment=environment,
**kwargs,
)

data_suffixes = (".mrc", ".tiff", ".tif", ".eer")
completed_tilts = []
if role == "detector" and "gain" not in transferred_file.name:

if "gain" not in transferred_file.name:
if transferred_file.suffix in data_suffixes:
if self._acquisition_software == "tomo":
if environment:
@@ -632,13 +631,10 @@ def post_transfer(
def post_first_transfer(
self,
transferred_file: Path,
role: str = "",
environment: MurfeyInstanceEnvironment | None = None,
**kwargs,
):
self.post_transfer(
transferred_file, role=role, environment=environment, **kwargs
)
self.post_transfer(transferred_file, environment=environment, **kwargs)

def gather_metadata(
self, metadata_file: Path, environment: MurfeyInstanceEnvironment | None = None
7 changes: 0 additions & 7 deletions src/murfey/client/multigrid_control.py
@@ -257,13 +257,6 @@ def rsync_result(update: RSyncerUpdate):
force_mdoc_metadata=self.force_mdoc_metadata,
limited=limited,
)
for data_dir in self._machine_config["data_directories"].keys():
if source.resolve().is_relative_to(Path(data_dir)):
self.analysers[source]._role = self._machine_config[
"data_directories"
][data_dir]
log.info(f"role found for {source}")
break
if force_metadata:
self.analysers[source].subscribe(
partial(self._start_dc, from_form=True)
18 changes: 0 additions & 18 deletions src/murfey/client/tui/app.py
@@ -74,7 +74,6 @@ def __init__(
gain_ref: Path | None = None,
redirected_logger=None,
force_mdoc_metadata: bool = False,
strict: bool = False,
processing_enabled: bool = True,
skip_existing_processing: bool = False,
**kwargs,
@@ -104,7 +103,6 @@ def __init__(
self._processing_enabled = processing_enabled
self._multigrid_watcher: MultigridDirWatcher | None = None
self._force_mdoc_metadata = force_mdoc_metadata
self._strict = strict
self._skip_existing_processing = skip_existing_processing
self._machine_config = get_machine_config_client(
str(self._environment.url.geturl()),
Expand All @@ -120,12 +118,6 @@ def __init__(
]
self.install_screen(MainScreen(), "main")

@property
def role(self) -> str:
if self.analyser:
return self.analyser._role
return ""

def _launch_multigrid_watcher(
self, source: Path, destination_overrides: Dict[Path, str] | None = None
):
@@ -291,16 +283,6 @@ def rsync_result(update: RSyncerUpdate):
force_mdoc_metadata=self._force_mdoc_metadata,
limited=limited,
)
machine_data = requests.get(
f"{self._environment.url.geturl()}/machine"
).json()
for data_dir in machine_data["data_directories"].keys():
if source.resolve().is_relative_to(Path(data_dir)):
self.analysers[source]._role = machine_data["data_directories"][
data_dir
]
log.info(f"role found for {source}")
break
if force_metadata:
self.analysers[source].subscribe(
partial(self._start_dc, from_form=True)