Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
91a16c9
Separate the bulk of the '_analyse' logic from the while loop it is i…
tieneupin Apr 10, 2026
ef1923a
Explicitly determine which contexts to permit to enter the DC metadat…
tieneupin Apr 10, 2026
e348b9b
Updated type hinting and rearranged functions in 'murfey.client.context'
tieneupin Apr 13, 2026
a410fa6
Adjusted logic for '_analyse' so that context-specific metadata proce…
tieneupin Apr 13, 2026
b848a97
Typo
tieneupin Apr 13, 2026
4cb78c5
Streamlined data structure for example files used in context test
tieneupin Apr 13, 2026
ee8a175
Add stubs for tests for the different supported workflows
tieneupin Apr 13, 2026
7852f04
Added tests for '_analyse' for the CLEM, FIB, and SXT workflows, as w…
tieneupin Apr 13, 2026
00508df
Added tests for the 'limited' logic block in '_analyse'
tieneupin Apr 13, 2026
a76e2d0
Added test for atlas analysis and SPA analysis
tieneupin Apr 13, 2026
fb07598
Add preliminary test for '_analyse' for tomo workflow
tieneupin Apr 14, 2026
7e00b7e
Added test for the '_find_extension' class function
tieneupin Apr 14, 2026
884bbc0
Missing brackets around the 'or' clauses in the 'if self._limited' bl…
tieneupin Apr 14, 2026
b912c2e
Updated Context names and removed deprecated logic for parsing XML me…
tieneupin Apr 14, 2026
24fee4a
Fixed broken test
tieneupin Apr 14, 2026
a51a5b4
Major rewrite of the '_analyse' function:
tieneupin Apr 14, 2026
76b529d
Forgot to include 'AtlasContext' in match-case logic
tieneupin Apr 14, 2026
0373096
No need for else-block
tieneupin Apr 14, 2026
d2122ad
Updated Analyser tests after rewrite
tieneupin Apr 14, 2026
2575978
Expanded '_analyse' tests for the SPA and Tomo contexts
tieneupin Apr 14, 2026
f8ffd2c
Fixed typo in FIBContext AutoTEM workflow
tieneupin Apr 15, 2026
4609edd
Merged recent changes from 'main' branch
tieneupin Apr 15, 2026
829bd48
Typo in documentation
tieneupin Apr 15, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
312 changes: 135 additions & 177 deletions src/murfey/client/analyser.py

Large diffs are not rendered by default.

20 changes: 10 additions & 10 deletions src/murfey/client/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import logging
from importlib.metadata import entry_points
from pathlib import Path
from typing import Any, List, NamedTuple
from typing import Any, NamedTuple, OrderedDict

import xmltodict

Expand Down Expand Up @@ -209,12 +209,6 @@ def ensure_dcg_exists(
return dcg_tag


class ProcessingParameter(NamedTuple):
name: str
label: str
default: Any = None


def detect_acquisition_software(dir_for_transfer: Path) -> str:
glob = dir_for_transfer.glob("*")
for f in glob:
Expand All @@ -225,9 +219,15 @@ def detect_acquisition_software(dir_for_transfer: Path) -> str:
return ""


class ProcessingParameter(NamedTuple):
name: str
label: str
default: Any = None


class Context:
user_params: List[ProcessingParameter] = []
metadata_params: List[ProcessingParameter] = []
user_params: list[ProcessingParameter] = []
metadata_params: list[ProcessingParameter] = []

def __init__(self, name: str, acquisition_software: str, token: str):
self._acquisition_software = acquisition_software
Expand Down Expand Up @@ -256,7 +256,7 @@ def post_first_transfer(

def gather_metadata(
self, metadata_file: Path, environment: MurfeyInstanceEnvironment | None = None
):
) -> OrderedDict | None:
raise NotImplementedError(
f"gather_metadata must be declared in derived class to be used: {self}"
)
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/atlas.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("Atlas", acquisition_software, token)
super().__init__("AtlasContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config

Expand Down
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/clem.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("CLEM", acquisition_software, token)
super().__init__("CLEMContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config
# CLEM contexts for "auto-save" acquisition mode
Expand Down
4 changes: 2 additions & 2 deletions src/murfey/client/contexts/fib.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("FIB", acquisition_software, token)
super().__init__("FIBContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config
self._milling: dict[int, list[MillingProgress]] = {}
Expand Down Expand Up @@ -189,7 +189,7 @@ def post_transfer(
sites = metadata["AutoTEM"]["Project"]["Sites"]["Site"]
for site in sites:
number = _number_from_name(site["Name"])
milling_angle = site["Workflow"]["Recipe"][0]["Activites"][
milling_angle = site["Workflow"]["Recipe"][0]["Activities"][
"MillingAngleActivity"
].get("MillingAngle")
if self._lamellae.get(number) and milling_angle:
Expand Down
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/spa.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("SPA", acquisition_software, token)
super().__init__("SPAContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config
self._processing_job_stash: dict = {}
Expand Down
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/spa_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("SPA_metadata", acquisition_software, token)
super().__init__("SPAMetadataContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config

Expand Down
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/sxt.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("SXT", acquisition_software, token)
super().__init__("SXTContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config

Expand Down
38 changes: 2 additions & 36 deletions src/murfey/client/contexts/tomo.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@
from threading import RLock
from typing import Callable, Dict, List, OrderedDict

import xmltodict

import murfey.util.eer
from murfey.client.context import Context, ProcessingParameter, ensure_dcg_exists
from murfey.client.instance_environment import (
Expand Down Expand Up @@ -84,7 +82,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("Tomography", acquisition_software, token)
super().__init__("TomographyContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config
self._tilt_series: Dict[str, List[Path]] = {}
Expand Down Expand Up @@ -550,46 +548,14 @@ def post_first_transfer(
def gather_metadata(
self, metadata_file: Path, environment: MurfeyInstanceEnvironment | None = None
) -> OrderedDict:
if metadata_file.suffix not in (".mdoc", ".xml"):
if metadata_file.suffix != ".mdoc":
raise ValueError(
f"Tomography gather_metadata method expected mdoc file, not {metadata_file.name}"
)
try:
if not metadata_file.is_file():
logger.debug(f"Metadata file {metadata_file} not found")
return OrderedDict({})
if metadata_file.suffix == ".xml":
with open(metadata_file, "r") as xml:
try:
for_parsing = xml.read()
except Exception:
logger.warning(
f"Failed to parse file {metadata_file}", exc_info=True
)
return OrderedDict({})
data = xmltodict.parse(for_parsing)
try:
metadata: OrderedDict = OrderedDict({})
metadata["experiment_type"] = "tomography"
metadata["voltage"] = 300
metadata["image_size_x"] = data["Acquisition"]["Info"]["ImageSize"][
"Width"
]
metadata["image_size_y"] = data["Acquisition"]["Info"]["ImageSize"][
"Height"
]
metadata["pixel_size_on_image"] = float(
data["Acquisition"]["Info"]["SensorPixelSize"]["Height"]
)
metadata["motion_corr_binning"] = 1
metadata["gain_ref"] = None
metadata["dose_per_frame"] = (
environment.dose_per_frame if environment else None
)
metadata["source"] = str(self._basepath)
except KeyError:
return OrderedDict({})
return metadata
with open(metadata_file, "r") as md:
mdoc_data = get_global_data(md)
num_blocks = get_num_blocks(md)
Expand Down
2 changes: 1 addition & 1 deletion src/murfey/client/contexts/tomo_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def __init__(
machine_config: dict,
token: str,
):
super().__init__("Tomography_metadata", acquisition_software, token)
super().__init__("TomographyMetadataContext", acquisition_software, token)
self._basepath = basepath
self._machine_config = machine_config

Expand Down
2 changes: 1 addition & 1 deletion tests/client/contexts/test_atlas.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

def test_atlas_context_initialisation(tmp_path):
context = AtlasContext("tomo", tmp_path, {}, "token")
assert context.name == "Atlas"
assert context.name == "AtlasContext"
assert context._acquisition_software == "tomo"
assert context._basepath == tmp_path
assert context._machine_config == {}
Expand Down
Loading
Loading