Skip to content

Commit 1fafd8f

Browse files
authored
Register FIB data (#766)
* Added a new FastAPI router, API endpoint, workflow, and entry point to use for registering FIB atlas images
* Added logic to the FIB Context to trigger the registration of FIB atlas images
* Added new columns to the ImagingSite table to keep track of stage position information in the FIB
* Enabled and fixed a prior workflow that was set up to create GIFs of the milling sites as images are acquired
* Added tests and fixed old ones
1 parent 49c52ad commit 1fafd8f

13 files changed

Lines changed: 1009 additions & 404 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,7 @@ TomographyMetadataContext = "murfey.client.contexts.tomo_metadata:TomographyMeta
115115
"data_collection" = "murfey.workflows.register_data_collection:run"
116116
"data_collection_group" = "murfey.workflows.register_data_collection_group:run"
117117
"experiment_type_update" = "murfey.workflows.register_experiment_type_update:run"
118+
"fib.register_atlas" = "murfey.workflows.fib.register_atlas:run"
118119
"pato" = "murfey.workflows.notifications:notification_setup"
119120
"picked_particles" = "murfey.workflows.spa.picking:particles_picked"
120121
"picked_tomogram" = "murfey.workflows.tomo.picking:picked_tomogram"

src/murfey/client/analyser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -400,7 +400,7 @@ def _analyse(self):
400400
mdoc_for_reading = transferred_file
401401
if not self._context:
402402
if not self._find_extension(transferred_file):
403-
logger.error(f"No extension found for {transferred_file}")
403+
logger.debug(f"No extension found for {transferred_file}")
404404
continue
405405
if not self._find_context(transferred_file):
406406
logger.debug(

src/murfey/client/contexts/fib.py

Lines changed: 65 additions & 146 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
from datetime import datetime
77
from pathlib import Path
88
from typing import NamedTuple
9-
from xml.etree import ElementTree as ET
109

1110
import xmltodict
1211

@@ -30,21 +29,6 @@ class MillingProgress(NamedTuple):
3029
timestamp: float
3130

3231

33-
class ElectronSnapshotMetadata(NamedTuple):
34-
slot_num: int | None # Which slot in the FIB-SEM it is from
35-
image_num: int
36-
image_dir: str # Partial path from EMproject.emxml parent to the image
37-
status: str
38-
x_len: float | None
39-
y_len: float | None
40-
z_len: float | None
41-
x_center: float | None
42-
y_center: float | None
43-
z_center: float | None
44-
extent: tuple[float, float, float, float] | None
45-
rotation_angle: float | None
46-
47-
4832
def _number_from_name(name: str) -> int:
4933
"""
5034
In the AutoTEM and Maps workflows for the FIB, the sites and images are
@@ -89,72 +73,6 @@ def _file_transferred_to(
8973
return destination
9074

9175

92-
def _parse_electron_snapshot_metadata(xml_file: Path):
93-
metadata_dict = {}
94-
root = ET.parse(xml_file).getroot()
95-
datasets = root.findall(".//Datasets/Dataset")
96-
for dataset in datasets:
97-
# Extract all string-based values
98-
name, image_dir, status = [
99-
node.text
100-
if ((node := dataset.find(node_path)) is not None and node.text is not None)
101-
else ""
102-
for node_path in (
103-
".//Name",
104-
".//FinalImages",
105-
".//Status",
106-
)
107-
]
108-
109-
# Extract all float values
110-
cx, cy, cz, x_len, y_len, z_len, rotation_angle = [
111-
float(node.text)
112-
if ((node := dataset.find(node_path)) is not None and node.text is not None)
113-
else None
114-
for node_path in (
115-
".//BoxCenter/CenterX",
116-
".//BoxCenter/CenterY",
117-
".//BoxCenter/CenterZ",
118-
".//BoxSize/SizeX",
119-
".//BoxSize/SizeY",
120-
".//BoxSize/SizeZ",
121-
".//RotationAngle",
122-
)
123-
]
124-
125-
# Calculate the extent of the image
126-
extent = None
127-
if (
128-
cx is not None
129-
and cy is not None
130-
and x_len is not None
131-
and y_len is not None
132-
):
133-
extent = (
134-
x_len - (cx / 2),
135-
x_len + (cx / 2),
136-
y_len - (cy / 2),
137-
y_len - (cy / 2),
138-
)
139-
140-
# Append metadata for current site to dict
141-
metadata_dict[name] = ElectronSnapshotMetadata(
142-
slot_num=None if cx is None else (1 if cx < 0 else 2),
143-
image_num=_number_from_name(name),
144-
status=status,
145-
image_dir=image_dir,
146-
x_len=x_len,
147-
y_len=y_len,
148-
z_len=z_len,
149-
x_center=cx,
150-
y_center=cy,
151-
z_center=cz,
152-
extent=extent,
153-
rotation_angle=rotation_angle,
154-
)
155-
return metadata_dict
156-
157-
15876
class FIBContext(Context):
15977
def __init__(
16078
self,
@@ -168,9 +86,6 @@ def __init__(
16886
self._machine_config = machine_config
16987
self._milling: dict[int, list[MillingProgress]] = {}
17088
self._lamellae: dict[int, Lamella] = {}
171-
self._electron_snapshots: dict[str, Path] = {}
172-
self._electron_snapshot_metadata: dict[str, ElectronSnapshotMetadata] = {}
173-
self._electron_snapshots_submitted: set[str] = set()
17489

17590
def post_transfer(
17691
self,
@@ -207,18 +122,35 @@ def post_transfer(
207122
name=lamella_name,
208123
number=lamella_number,
209124
)
125+
if not (source := _get_source(transferred_file, environment)):
126+
logger.warning(f"No source found for file {transferred_file}")
127+
return
128+
if not (
129+
destination_file := _file_transferred_to(
130+
environment=environment,
131+
source=source,
132+
file_path=transferred_file,
133+
rsync_basepath=Path(
134+
self._machine_config.get("rsync_basepath", "")
135+
),
136+
)
137+
):
138+
logger.warning(
139+
f"File {transferred_file.name!r} not found on storage system"
140+
)
141+
return
210142
if not self._milling.get(lamella_number):
211143
self._milling[lamella_number] = [
212144
MillingProgress(
213145
timestamp=timestamp,
214-
file=transferred_file,
146+
file=destination_file,
215147
)
216148
]
217149
else:
218150
self._milling[lamella_number].append(
219151
MillingProgress(
220152
timestamp=timestamp,
221-
file=transferred_file,
153+
file=destination_file,
222154
)
223155
)
224156
gif_list = [
@@ -227,26 +159,25 @@ def post_transfer(
227159
self._milling[lamella_number], key=lambda x: x.timestamp
228160
)
229161
]
230-
if environment:
231-
raw_directory = Path(
232-
environment.default_destinations[self._basepath]
233-
).name
234-
# post gif list to gif making API call
235-
capture_post(
236-
base_url=str(environment.url.geturl()),
237-
router_name="workflow.correlative_router",
238-
function_name="make_gif",
239-
token=self._token,
240-
instrument_name=environment.instrument_name,
241-
year=datetime.now().year,
242-
visit_name=environment.visit,
243-
session_id=environment.murfey_session,
244-
data={
245-
"lamella_number": lamella_number,
246-
"images": gif_list,
247-
"raw_directory": raw_directory,
248-
},
249-
)
162+
raw_directory = Path(
163+
environment.default_destinations[self._basepath]
164+
).name
165+
# Submit job to backend to construct a GIF
166+
capture_post(
167+
base_url=str(environment.url.geturl()),
168+
router_name="workflow.correlative_router",
169+
function_name="make_gif",
170+
token=self._token,
171+
instrument_name=environment.instrument_name,
172+
year=datetime.now().year,
173+
visit_name=environment.visit,
174+
session_id=environment.murfey_session,
175+
data={
176+
"lamella_number": lamella_number,
177+
"images": [str(file) for file in gif_list],
178+
"raw_directory": raw_directory,
179+
},
180+
)
250181
elif transferred_file.name == "ProjectData.dat":
251182
with open(transferred_file, "r") as dat:
252183
try:
@@ -269,34 +200,11 @@ def post_transfer(
269200
# Maps
270201
# -----------------------------------------------------------------------------
271202
elif self._acquisition_software == "maps":
272-
# Electron snapshot metadata file
273-
if transferred_file.name == "EMproject.emxml":
274-
# Extract all "Electron Snapshot" metadata and store it
275-
self._electron_snapshot_metadata = _parse_electron_snapshot_metadata(
276-
transferred_file
277-
)
278-
# If dataset hasn't been transferred, register it
279-
for dataset_name in list(self._electron_snapshot_metadata.keys()):
280-
if dataset_name not in self._electron_snapshots_submitted:
281-
if dataset_name in self._electron_snapshots:
282-
logger.info(f"Registering {dataset_name!r}")
283-
284-
## Workflow to trigger goes here
285-
286-
# Clear old entry after triggering workflow
287-
self._electron_snapshots_submitted.add(dataset_name)
288-
with lock:
289-
self._electron_snapshots.pop(dataset_name, None)
290-
self._electron_snapshot_metadata.pop(dataset_name, None)
291-
else:
292-
logger.debug(f"Waiting for image for {dataset_name}")
293-
# Electron snapshot image
294-
elif (
203+
if (
204+
# Electron snapshot images are grid atlases
295205
"Electron Snapshot" in transferred_file.name
296206
and transferred_file.suffix in (".tif", ".tiff")
297207
):
298-
# Store file in Context memory
299-
dataset_name = transferred_file.stem
300208
if not (source := _get_source(transferred_file, environment)):
301209
logger.warning(f"No source found for file {transferred_file}")
302210
return
@@ -314,24 +222,35 @@ def post_transfer(
314222
f"File {transferred_file.name!r} not found on storage system"
315223
)
316224
return
317-
self._electron_snapshots[dataset_name] = destination_file
318-
319-
if dataset_name not in self._electron_snapshots_submitted:
320-
# If the metadata and image are both present, register dataset
321-
if dataset_name in list(self._electron_snapshot_metadata.keys()):
322-
logger.info(f"Registering {dataset_name!r}")
323225

324-
## Workflow to trigger goes here
226+
# Register image in database
227+
self._register_atlas(destination_file, environment)
228+
return
325229

326-
# Clear old entry after triggering workflow
327-
self._electron_snapshots_submitted.add(dataset_name)
328-
with lock:
329-
self._electron_snapshots.pop(dataset_name, None)
330-
self._electron_snapshot_metadata.pop(dataset_name, None)
331-
else:
332-
logger.debug(f"Waiting for metadata for {dataset_name}")
333230
# -----------------------------------------------------------------------------
334231
# Meteor
335232
# -----------------------------------------------------------------------------
336233
elif self._acquisition_software == "meteor":
337234
pass
235+
236+
def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment):
237+
"""
238+
Constructs the URL and dictionary to be posted to the server, which then triggers
239+
the processing of the electron snapshot image.
240+
"""
241+
242+
try:
243+
capture_post(
244+
base_url=str(environment.url.geturl()),
245+
router_name="workflow_fib.router",
246+
function_name="register_fib_atlas",
247+
token=self._token,
248+
instrument_name=environment.instrument_name,
249+
data={"file": str(file)},
250+
session_id=environment.murfey_session,
251+
)
252+
logger.info(f"Registering atlas image {file.name!r}")
253+
return True
254+
except Exception as e:
255+
logger.error(f"Error encountered registering atlas image {file.name}:\n{e}")
256+
return False

src/murfey/server/api/workflow.py

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from pathlib import Path
55
from typing import Any, Dict, List, Optional
66

7+
import numpy as np
78
import sqlalchemy
89
from fastapi import APIRouter, Depends
910
from ispyb.sqlalchemy import (
@@ -1208,29 +1209,43 @@ async def make_gif(
12081209
]
12091210
output_dir = (
12101211
(machine_config.rsync_basepath or Path("")).resolve()
1211-
/ secure_filename(year)
1212+
/ secure_filename(str(year))
12121213
/ secure_filename(visit_name)
12131214
/ "processed"
12141215
)
12151216
output_dir.mkdir(exist_ok=True)
12161217
output_dir = output_dir / secure_filename(gif_params.raw_directory)
12171218
output_dir.mkdir(exist_ok=True)
12181219
output_path = output_dir / f"lamella_{gif_params.lamella_number}_milling.gif"
1219-
image_full_paths = [
1220-
output_dir.parent / gif_params.raw_directory / i for i in gif_params.images
1221-
]
1220+
12221221
if Image is not None:
1223-
images = [Image.open(f) for f in image_full_paths]
1222+
images = [Image.open(f) for f in gif_params.images]
12241223
else:
12251224
images = []
12261225
for im in images:
12271226
im.thumbnail((512, 512))
1228-
images[0].save(
1227+
1228+
# Normalize and convert individual frames to 8-bit
1229+
arr: list[np.ndarray] = []
1230+
for im in images:
1231+
frame = np.array(im).astype(np.float32)
1232+
vmin, vmax = np.percentile(frame, (0.5, 99.5))
1233+
scale = 255 / ((vmax - vmin) or 1)
1234+
np.clip(frame, a_min=vmin, a_max=vmax, out=frame)
1235+
np.subtract(frame, vmin, out=frame)
1236+
np.multiply(frame, scale, out=frame)
1237+
arr.append(frame.astype(np.uint8))
1238+
arr = np.array(arr).astype(np.uint8)
1239+
1240+
# Convert back to Image objects and save as GIF
1241+
converted = [Image.fromarray(arr[f], mode="L") for f in range(len(images))]
1242+
converted[0].save(
12291243
output_path,
12301244
format="GIF",
1231-
append_images=images[1:],
1245+
append_images=converted[1:],
12321246
save_all=True,
12331247
duration=30,
12341248
loop=0,
12351249
)
1250+
12361251
return {"output_gif": str(output_path)}

0 commit comments

Comments (0)