5 changes: 3 additions & 2 deletions pyproject.toml
@@ -13,10 +13,11 @@ description = "Eiger control system integration with FastCS"
 dependencies = [
     "aiohttp",
     "fastcs[epicsca]",
-    "fastcs-odin @ git+https://github.com/DiamondLightSource/fastcs-odin.git@0.8.0a1",
+    "fastcs-odin @ git+https://github.com/DiamondLightSource/fastcs-odin.git@eb1d85bfcd42deb6907a6e41e29f55aafc33f6b5",
     "numpy",
     "pillow",
     "typer",
+    "h5py",
 ] # Add project dependencies here, e.g. ["click", "numpy"]
 dynamic = ["version"]
 license.file = "LICENSE"
@@ -25,7 +26,7 @@ requires-python = ">=3.11"
 
 [project.optional-dependencies]
 dev = [
-    "tickit-devices @ git+https://github.com/dls-controls/tickit-devices.git@eiger-stream2",
+    "tickit-devices @ git+https://github.com/dls-controls/tickit-devices.git@main",
     "black",
     "copier",
     "myst-parser",
21 changes: 21 additions & 0 deletions src/fastcs_eiger/controllers/odin/eiger_odin_controller.py
@@ -1,15 +1,21 @@
 import asyncio
+from pathlib import Path
 
+from fastcs.attributes import AttrRW
 from fastcs.connections import IPConnectionSettings
+from fastcs.datatypes import Bool
 from fastcs.methods import command
 
 from fastcs_eiger.controllers.eiger_controller import EigerController
+from fastcs_eiger.controllers.odin.generate_vds import create_interleave_vds
 from fastcs_eiger.controllers.odin.odin_controller import OdinController
 
 
 class EigerOdinController(EigerController):
     """Eiger controller with Odin sub controller"""
 
+    enable_vds_creation = AttrRW(Bool())
+
     def __init__(
         self,
         detector_connection_settings: IPConnectionSettings,
@@ -58,3 +64,18 @@ async def start_writing(self):
             await self.OD.writing.wait_for_value(True, timeout=1)
         except TimeoutError as e:
             raise TimeoutError("File writers failed to start") from e
+
+        if self.enable_vds_creation.get():
+            create_interleave_vds(
+                path=Path(self.OD.file_path.get()),
+                prefix=self.OD.file_prefix.get(),
+                datasets=["data1", "data2", "data3"],
+                frame_count=self.OD.FP.frames.get(),
+                frames_per_block=self.OD.block_size.get(),
+                blocks_per_file=self.OD.FP.process_blocks_per_file.get(),
Contributor:
process_blocks_per_file needs adding to the EigerFrameProcessorPlugin

Author:
DiamondLightSource/fastcs-odin#98
Have added this and the other attributes here
+                frame_shape=(
+                    self.OD.FP.data_dims_1.get(),
+                    self.OD.FP.data_dims_0.get(),
+                ),
Contributor:
This should be OD.FP.data_dims_0/1. These probably don't change if it is in ROI mode.
+                dtype=self.OD.FP.data_datatype.get(),
+            )
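
For context on the new start_writing behaviour, here is a minimal sketch of how the resulting virtual dataset could be read back after an acquisition. The path and the "scan" prefix are hypothetical; the dataset names follow the `datasets` list passed above.

```python
import h5py

# Hypothetical layout: an acquisition written with prefix "scan" into /data
# produces raw files scan_000001.h5 ... scan_00000N.h5 plus scan_vds.h5.
with h5py.File("/data/scan_vds.h5", "r") as f:
    # Each dataset ("data1", "data2", "data3") exposes frames in acquisition
    # order, regardless of which raw file each block of frames landed in.
    first_frame = f["data1"][0]
    print(first_frame.shape)  # -> frame_shape as passed to create_interleave_vds
```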
118 changes: 118 additions & 0 deletions src/fastcs_eiger/controllers/odin/generate_vds.py
@@ -0,0 +1,118 @@
+import math
+from dataclasses import dataclass
+from pathlib import Path
+
+import h5py
+
+
+@dataclass
+class FileFrames:
+    frames: int
+    start: int
+    frames_per_block: int
+
+    @property
+    def blocks(self):
+        return self.frames // self.frames_per_block
+
+    @property
+    def remainder_frames(self):
+        return self.frames % self.frames_per_block
+
+
+def _get_frames_per_file_writer(
+    frame_count: int, frames_per_block: int, n_file_writers: int
+) -> list[int]:
+    n_blocks = math.ceil(frame_count / frames_per_block)
+    min_blocks_per_file = n_blocks // n_file_writers
+    remainder = n_blocks - min_blocks_per_file * n_file_writers
+
+    frames_per_file_writer = []
+    for i in range(n_file_writers):
+        blocks = min_blocks_per_file + (i < remainder)
+        frames_per_file_writer.append(blocks * frames_per_block)
+
+    overflow = sum(frames_per_file_writer) - frame_count
+    frames_per_file_writer[remainder - 1] -= overflow
+    return frames_per_file_writer
+
+
+def _calculate_frame_distribution(
+    frame_count, frames_per_block, blocks_per_file, n_file_writers
+) -> dict[int, FileFrames]:
+
+    frame_distribution: dict[int, FileFrames] = {}
+
+    max_frames_per_file = (
+        frames_per_block * blocks_per_file if blocks_per_file else frame_count
+    )
+    # total frames written before one of the file writers has to start a new file
+    frames_before_new_file = n_file_writers * max_frames_per_file
+    frames_per_file_writer = _get_frames_per_file_writer(
+        frame_count, frames_per_block, n_file_writers
+    )
+    for file_writer_idx, n_frames in enumerate(frames_per_file_writer):
+        n_files = math.ceil(n_frames / max_frames_per_file)
+        for i in range(n_files):
+            file_idx = file_writer_idx + i * n_file_writers
+
+            frame_distribution[file_idx + 1] = FileFrames(
+                frames=min(n_frames - i * max_frames_per_file, max_frames_per_file),
+                frames_per_block=frames_per_block,
+                start=frames_per_block * file_writer_idx
+                + file_idx // n_file_writers * frames_before_new_file,
+            )
+
+    return frame_distribution
+
+
+def create_interleave_vds(
+    path: Path,
+    prefix: str,
+    datasets: list[str],
+    frame_count: int,
+    frames_per_block: int,
+    blocks_per_file: int,
+    frame_shape: tuple[int, int],
+    dtype: str = "float",
+    n_file_writers: int = 4,
+) -> None:
+    frame_distribution = _calculate_frame_distribution(
+        frame_count, frames_per_block, blocks_per_file, n_file_writers
+    )
+    stride = n_file_writers * frames_per_block
+
+    with h5py.File(f"{path / prefix}_vds.h5", "w", libver="latest") as f:
+        for dataset_name in datasets:
+            v_layout = h5py.VirtualLayout(
+                shape=(frame_count, frame_shape[0], frame_shape[1]),
+                dtype=dtype,
+            )
+            for file_number, file_frames in frame_distribution.items():
+                full_block_frames = file_frames.blocks * frames_per_block
+                v_source = h5py.VirtualSource(
+                    f"{path / prefix}_{str(file_number).zfill(6)}.h5",
+                    name=dataset_name,
+                    shape=(file_frames.frames, frame_shape[0], frame_shape[1]),
+                    dtype=dtype,
+                )
+                if file_frames.blocks:
+                    source = v_source[:full_block_frames, :, :]
+                    v_layout[
+                        h5py.MultiBlockSlice(
+                            start=file_frames.start,
+                            stride=stride,
+                            count=file_frames.blocks,
+                            block=frames_per_block,
+                        ),
+                        :,
+                        :,
+                    ] = source
+                if file_frames.remainder_frames:
+                    # Last few frames that don't fit into a block
+                    source = v_source[full_block_frames : file_frames.frames, :, :]
+                    v_layout[
+                        frame_count - file_frames.remainder_frames : frame_count, :, :
+                    ] = source
+
+            f.create_virtual_dataset(dataset_name, v_layout)
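
As a rough illustration of the interleave logic, here is a sketch with made-up numbers, assuming the module is importable under the path added in this PR (`_calculate_frame_distribution` is the private helper defined above):

```python
from fastcs_eiger.controllers.odin.generate_vds import _calculate_frame_distribution

# Hypothetical acquisition: 10 frames, blocks of 2 frames, 1 block per file,
# and 4 file writers round-robining over the blocks.
distribution = _calculate_frame_distribution(
    frame_count=10, frames_per_block=2, blocks_per_file=1, n_file_writers=4
)
for file_number, file_frames in sorted(distribution.items()):
    first = file_frames.start
    print(f"_{file_number:06}.h5 -> frames {first}..{first + file_frames.frames - 1}")
# _000001.h5 -> frames 0..1
# _000002.h5 -> frames 2..3
# _000003.h5 -> frames 4..5
# _000004.h5 -> frames 6..7
# _000005.h5 -> frames 8..9
```

With one block per file, each raw file maps to a contiguous run of frames in the VDS; with more blocks per file, the runs from a given file interleave at a stride of n_file_writers * frames_per_block, which is what the MultiBlockSlice in create_interleave_vds expresses.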