From ec7ded3cdde511497e7121ca3ef1dceb1f385724 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 7 Oct 2025 20:57:59 +0200 Subject: [PATCH 01/24] add types for normalization workflow --- src/ess/imaging/types.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index 73ce619..c148fb5 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -73,4 +73,28 @@ class CountsMasked(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Detector data with masks.""" +class RawDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Raw detector counts read from file.""" + + +class CorrectedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Corrected detector counts with masking applied.""" + + +class NormalizedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector counts normalized to proton charge.""" + + +class BackgroundSubtractedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector counts with dark background subtracted.""" + + +class IntensityDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector counts normalized by open beam measurement.""" + + +class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Proton charge data for a run.""" + + del sc, sciline, NewType, TypeVar From 4ea65e50a9c73619b9f160ed1074184602ef8293 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 10 Oct 2025 15:32:40 +0200 Subject: [PATCH 02/24] start adding providers for the standard normalization part of the workflow --- src/ess/imaging/__init__.py | 4 +- src/ess/imaging/masking.py | 29 ++++++++++ src/ess/imaging/normalization.py | 93 ++++++++++++++++++++++++++++++++ src/ess/imaging/types.py | 2 +- 4 files changed, 126 insertions(+), 2 deletions(-) create mode 100644 src/ess/imaging/masking.py create mode 100644 src/ess/imaging/normalization.py diff --git a/src/ess/imaging/__init__.py b/src/ess/imaging/__init__.py index f1c1083..9d49718 100644 --- a/src/ess/imaging/__init__.py +++ b/src/ess/imaging/__init__.py @@ -9,11 +9,13 @@ except importlib.metadata.PackageNotFoundError: __version__ = "0.0.0" -from . import tools +from . import masking, normalization, tools del importlib __all__ = [ "__version__", + "masking", + "normalization", "tools", ] diff --git a/src/ess/imaging/masking.py b/src/ess/imaging/masking.py new file mode 100644 index 0000000..9f929a4 --- /dev/null +++ b/src/ess/imaging/masking.py @@ -0,0 +1,29 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers to apply masks to detector data. 
+""" + +import scipp as sc + +from ..imaging.types import CorrectedDetector, MaskingRules, RawDetector, RunType + + +def apply_masks( + da: RawDetector[RunType], masks: MaskingRules +) -> CorrectedDetector[RunType]: + out = da.copy(deep=False) + for coord_name, mask in masks.items(): + if (out.bins is not None) and (coord_name in out.bins.coords): + out.bins.masks[coord_name] = mask(out.bins.coords[coord_name]) + else: + coord = ( + sc.midpoints(out.coords[coord_name]) + if out.coords.is_edges(coord_name, coord_name) + else out.coords[coord_name] + ) + out.masks[coord_name] = mask(coord) + return CorrectedDetector[RunType](out) + + +providers = (apply_masks,) diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py new file mode 100644 index 0000000..9e4ed59 --- /dev/null +++ b/src/ess/imaging/normalization.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers for normalization. +""" + +from .types import ( + BackgroundSubtractedDetector, + CorrectedDetector, + DarkBackgroundRun, + IntensityDetector, + NormalizedDetector, + OpenBeamRun, + ProtonCharge, + RunType, + SampleRun, +) + + +def normalize_by_proton_charge( + data: CorrectedDetector[RunType], proton_charge: ProtonCharge[RunType] +) -> NormalizedDetector[RunType]: + """ + Normalize detector data by the proton charge. + + Parameters + ---------- + data: + Corrected detector data to be normalized. + proton_charge: + Proton charge data for normalization. + """ + return NormalizedDetector[RunType](data / proton_charge.sum()) + + +def subtract_background_sample( + data: NormalizedDetector[SampleRun], + background: NormalizedDetector[DarkBackgroundRun], +) -> BackgroundSubtractedDetector[SampleRun]: + """ + Subtract background from proton-charge normalized sample data. + + Parameters + ---------- + data: + Sample data to process (normalized to proton charge). + background: + Background (dark frame) data to subtract (normalized to proton charge). + """ + return BackgroundSubtractedDetector[SampleRun](data - background) + + +def subtract_background_openbeam( + data: NormalizedDetector[OpenBeamRun], + background: NormalizedDetector[DarkBackgroundRun], +) -> BackgroundSubtractedDetector[OpenBeamRun]: + """ + Subtract background from proton-charge normalized open beam data. + + Parameters + ---------- + data: + Open beam data to process (normalized to proton charge). + background: + Background (dark frame) data to subtract (normalized to proton charge). + """ + return BackgroundSubtractedDetector[OpenBeamRun](data - background) + + +def sample_over_openbeam( + sample: BackgroundSubtractedDetector[SampleRun], + open_beam: BackgroundSubtractedDetector[OpenBeamRun], +) -> IntensityDetector: + """ + Divide background-subtracted sample data by background-subtracted open beam data, + to obtain final normalized image. + + Parameters + ---------- + sample: + Sample data to process (background subtracted). + open_beam: + Open beam data to divide by (background subtracted). 
+ """ + return IntensityDetector(sample / open_beam) + + +providers = ( + normalize_by_proton_charge, + subtract_background_sample, + subtract_background_openbeam, + sample_over_openbeam, +) diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index c148fb5..ab2b6c9 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -90,7 +90,7 @@ class BackgroundSubtractedDetector(sciline.Scope[RunType, sc.DataArray], sc.Data class IntensityDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): - """Detector counts normalized by open beam measurement.""" + """Intensity at the detector normalized by open beam measurement.""" class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): From 90c8e65d9397b6aff0f21324e971f182a8258a0f Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 10 Oct 2025 21:14:18 +0200 Subject: [PATCH 03/24] add question about how to perform the normalization by proton charge --- src/ess/imaging/masking.py | 2 ++ src/ess/imaging/normalization.py | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/src/ess/imaging/masking.py b/src/ess/imaging/masking.py index 9f929a4..e76e231 100644 --- a/src/ess/imaging/masking.py +++ b/src/ess/imaging/masking.py @@ -12,6 +12,8 @@ def apply_masks( da: RawDetector[RunType], masks: MaskingRules ) -> CorrectedDetector[RunType]: + if not masks: + return CorrectedDetector[RunType](da) out = da.copy(deep=False) for coord_name, mask in masks.items(): if (out.bins is not None) and (coord_name in out.bins.coords): diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py index 9e4ed59..714ece6 100644 --- a/src/ess/imaging/normalization.py +++ b/src/ess/imaging/normalization.py @@ -30,6 +30,16 @@ def normalize_by_proton_charge( proton_charge: Proton charge data for normalization. """ + + # How do we do this? + # The data can have a time dimension. We want to sum the proton charge inside each + # time bin (defined by the duration of each frame). However, the time dimension of + # the data recorded at the detector is not the same time as the proton charge + # (which is when the protons hit the target). We need to shift the proton charge + # time to account for the time it takes for neutrons to travel from the target + # to the detector. Does this mean we can't do the normalization without computing + # time of flight? + return NormalizedDetector[RunType](data / proton_charge.sum()) From d91fa67f0bc2ad07460379b7eef0f60020ab8e06 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Mon, 20 Oct 2025 17:12:55 +0200 Subject: [PATCH 04/24] start specializing normalize by proton charge part of workflow for orca detector (which will be different from timepix3 which is event based) --- src/ess/imaging/normalization.py | 29 +-------- src/ess/tbl/orca.py | 101 +++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+), 27 deletions(-) create mode 100644 src/ess/tbl/orca.py diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py index 714ece6..343c72f 100644 --- a/src/ess/imaging/normalization.py +++ b/src/ess/imaging/normalization.py @@ -4,6 +4,8 @@ Contains the providers for normalization. """ +import scipp as sc + from .types import ( BackgroundSubtractedDetector, CorrectedDetector, @@ -17,32 +19,6 @@ ) -def normalize_by_proton_charge( - data: CorrectedDetector[RunType], proton_charge: ProtonCharge[RunType] -) -> NormalizedDetector[RunType]: - """ - Normalize detector data by the proton charge. 
- - Parameters - ---------- - data: - Corrected detector data to be normalized. - proton_charge: - Proton charge data for normalization. - """ - - # How do we do this? - # The data can have a time dimension. We want to sum the proton charge inside each - # time bin (defined by the duration of each frame). However, the time dimension of - # the data recorded at the detector is not the same time as the proton charge - # (which is when the protons hit the target). We need to shift the proton charge - # time to account for the time it takes for neutrons to travel from the target - # to the detector. Does this mean we can't do the normalization without computing - # time of flight? - - return NormalizedDetector[RunType](data / proton_charge.sum()) - - def subtract_background_sample( data: NormalizedDetector[SampleRun], background: NormalizedDetector[DarkBackgroundRun], @@ -96,7 +72,6 @@ def sample_over_openbeam( providers = ( - normalize_by_proton_charge, subtract_background_sample, subtract_background_openbeam, sample_over_openbeam, diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py new file mode 100644 index 0000000..3af3422 --- /dev/null +++ b/src/ess/tbl/orca.py @@ -0,0 +1,101 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers for the orca workflow. +""" + +import sciline as sl +import scipp as sc + +from .. import imaging +from .types import ( + CorrectedDetector, + DarkBackgroundRun, + NormalizedDetector, + OpenBeamRun, + ProtonCharge, + RunType, + SampleRun, +) + + +class ExposureTime(sl.Scope[RunType, sc.DataArray], sc.DataArray): + """Exposure time for a run.""" + + +def normalize_by_proton_charge_orca( + data: CorrectedDetector[RunType], + proton_charge: ProtonCharge[RunType], + exposure_time: ExposureTime[RunType], +) -> NormalizedDetector[RunType]: + """ + Normalize detector data by the proton charge (for dark and open beam runs). + + Parameters + ---------- + data: + Corrected detector data to be normalized. + proton_charge: + Proton charge data for normalization. + """ + + return NormalizedDetector[RunType](data.sum('time') / proton_charge.sum('time')) + + +def normalize_by_proton_charge_orca_sample( + data: CorrectedDetector[SampleRun], + proton_charge: ProtonCharge[SampleRun], + exposure_time: ExposureTime[SampleRun], +) -> NormalizedDetector[SampleRun]: + """ + Normalize sample run detector data by the proton charge. + The handling of the SampleRun is different from the other runs: + The sample may have a time dimension representing multiple frames. + In this case, we need to sum the proton charge for each frame separately. + + Parameters + ---------- + data: + Corrected detector data to be normalized. + proton_charge: + Proton charge data for normalization. + """ + + # How do we do this? + # The data can have a time dimension. We want to sum the proton charge inside each + # time bin (defined by the duration of each frame). However, the time dimension of + # the data recorded at the detector is not the same time as the proton charge + # (which is when the protons hit the target). We need to shift the proton charge + # time to account for the time it takes for neutrons to travel from the target + # to the detector. Does this mean we can't do the normalization without computing + # time of flight? 
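+    # (Purely illustrative, not used below: for an assumed nominal wavelength the
+    # shift could be estimated as dt = L / v, with v = 3956 m/s / wavelength[angstrom];
+    # e.g. 4 angstrom neutrons over a hypothetical 30 m flight path arrive roughly
+    # 30 ms after the protons hit the target.)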
+
+    time = data.coords['time']
+    last_exposure_time = time[-1] - time[-2]
+    time_bins = sc.concat([time, time[-1] + last_exposure_time], dim='time')
+    rebinned_proton_charge = proton_charge.hist(time=time_bins)
+
+    # Here we assume the proton charge only has a single time dimension.
+    return NormalizedDetector[SampleRun](
+        data / rebinned_proton_charge.drop_coords('time')
+    )
+
+
+# providers = (
+#     normalize_by_proton_charge_orca,
+#     normalize_by_proton_charge_orca_sample,
+# )
+
+
+def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline:
+    """
+    Workflow with default parameters for TBL.
+    """
+
+    wf = sl.Pipeline(
+        (*imaging.normalization.providers, *imaging.masking.providers),
+        constraints={RunType: [SampleRun, OpenBeamRun, DarkBackgroundRun]},
+    )
+    wf.insert(normalize_by_proton_charge_orca)
+    wf.insert(normalize_by_proton_charge_orca_sample)
+    return wf

From 238af65223be7e904b0dce721860e161ecfb5509 Mon Sep 17 00:00:00 2001
From: Neil Vaytet
Date: Tue, 21 Oct 2025 14:12:22 +0200
Subject: [PATCH 05/24] fix proton charge normalization

---
 src/ess/tbl/__init__.py |  2 ++
 src/ess/tbl/orca.py     | 78 +++++++++++++++++++++++++++++------------
 2 files changed, 57 insertions(+), 23 deletions(-)

diff --git a/src/ess/tbl/__init__.py b/src/ess/tbl/__init__.py
index c3e8a29..fe5b9d4 100644
--- a/src/ess/tbl/__init__.py
+++ b/src/ess/tbl/__init__.py
@@ -4,6 +4,7 @@

 import importlib.metadata

+from . import orca
 from .workflow import TblWorkflow, default_parameters

 try:
@@ -16,4 +17,5 @@
 __all__ = [
     "TblWorkflow",
     "default_parameters",
+    "orca",
 ]
diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py
index 3af3422..1bae090 100644
--- a/src/ess/tbl/orca.py
+++ b/src/ess/tbl/orca.py
@@ -7,8 +7,10 @@

 import sciline as sl
 import scipp as sc

+from ess.reduce.nexus import GenericNeXusWorkflow
+
 from .. import imaging
-from .types import (
+from ..imaging.types import (
     CorrectedDetector,
     DarkBackgroundRun,
     NormalizedDetector,
@@ -23,13 +25,45 @@
 class ExposureTime(sl.Scope[RunType, sc.DataArray], sc.DataArray):
     """Exposure time for a run."""


+def _compute_proton_charge_per_exposure(
+    data: sc.DataArray, proton_charge: sc.DataArray, exposure_time: sc.DataArray
+) -> sc.DataArray:
+    # A note on timings:
+    # We want to sum the proton charge inside each time bin (defined by the duration of
+    # each frame). However, the time dimension of the data recorded at the detector is
+    # not the same time as the proton charge (which is when the protons hit the
+    # target). We need to shift the proton charge time to account for the time it takes
+    # for neutrons to travel from the target to the detector. Does this mean we cannot
+    # do the normalization without computing time of flight?
+
+    t = data.coords['time']
+    exp = exposure_time.to(unit=t.unit)
+    # The following assumes that the difference between successive frames (time stamps)
+    # is larger than the exposure time. We need to check that this is indeed the case.
+    if (t[1:] - t[:-1]).min() < exp:
+        raise ValueError(
+            "normalize_by_proton_charge_orca: Exposure time is larger than the "
+            "smallest time between successive frames."
+        )
+
+    bins = sc.sort(sc.concat([t, t + exp], dim=t.dim), t.dim)
+    # We select every second bin, as the odd bins lie between the end of the exposure
+    # of one frame and the start of the next frame.
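+    # E.g. frame starts t0 < t1 with exposure e give sorted edges
+    # [t0, t0 + e, t1, t1 + e]: bins 0 and 2 cover the exposures, while bin 1 covers
+    # the idle gap between them, hence the [::2] selection below.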
+ return proton_charge.hist(time=bins).data[::2] + + def normalize_by_proton_charge_orca( data: CorrectedDetector[RunType], proton_charge: ProtonCharge[RunType], exposure_time: ExposureTime[RunType], ) -> NormalizedDetector[RunType]: """ - Normalize detector data by the proton charge (for dark and open beam runs). + Normalize detector data by the proton charge (dark and open beam runs). + We find the time stamps for the data, which mark the start of an exposure. + We sum the proton charge within the intervals timestamp to timestamp + + exposure_time. + We then sum the data along the time dimension, and divide by the sum of the proton + charge accumulated during each exposure. Parameters ---------- @@ -37,9 +71,15 @@ def normalize_by_proton_charge_orca( Corrected detector data to be normalized. proton_charge: Proton charge data for normalization. + exposure_time: + Exposure time for each image in the data. """ - return NormalizedDetector[RunType](data.sum('time') / proton_charge.sum('time')) + charge_per_frame = _compute_proton_charge_per_exposure( + data, proton_charge, exposure_time + ) + + return NormalizedDetector[RunType](data.sum('time') / charge_per_frame.sum('time')) def normalize_by_proton_charge_orca_sample( @@ -61,25 +101,13 @@ def normalize_by_proton_charge_orca_sample( Proton charge data for normalization. """ - # How do we do this? - # The data can have a time dimension. We want to sum the proton charge inside each - # time bin (defined by the duration of each frame). However, the time dimension of - # the data recorded at the detector is not the same time as the proton charge - # (which is when the protons hit the target). We need to shift the proton charge - # time to account for the time it takes for neutrons to travel from the target - # to the detector. Does this mean we can't do the normalization without computing - # time of flight? - - time = data.coords['time'] - last_exposure_time = time[-1] - time[-2] - time_bins = sc.concat([time, time[-1] + last_exposure_time], dim='time') - rebinned_proton_charge = proton_charge.hist(time=time_bins) - - # Here we assume the proton charge only has a single time dimension. - return NormalizedDetector[SampleRun]( - data / rebinned_proton_charge.drop_coords('time') + charge_per_frame = _compute_proton_charge_per_exposure( + data, proton_charge, exposure_time ) + # Here we preserve the time dimension of the sample data and the proton charge. + return NormalizedDetector[SampleRun](data / charge_per_frame) + # providers = ( # normalize_by_proton_charge_orca, @@ -92,10 +120,14 @@ def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline: Workflow with default parameters for TBL. 
""" - wf = sl.Pipeline( - (*imaging.normalization.providers, *imaging.masking.providers), - constraints={RunType: [SampleRun, OpenBeamRun, DarkBackgroundRun]}, + wf = GenericNeXusWorkflow( + run_types=[SampleRun, OpenBeamRun, DarkBackgroundRun], + monitor_types=[], + **kwargs, ) + + for provider in (*imaging.normalization.providers, *imaging.masking.providers): + wf.insert(provider) wf.insert(normalize_by_proton_charge_orca) wf.insert(normalize_by_proton_charge_orca_sample) return wf From 5f6b89374c74c24d97a2404c86ef566ab12499d7 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 21 Oct 2025 22:45:26 +0200 Subject: [PATCH 06/24] use the constraints on the workflow to load the proton charge and exposure time --- src/ess/imaging/masking.py | 6 ++-- src/ess/imaging/types.py | 15 ++++++++-- src/ess/tbl/orca.py | 60 ++++++++++++++++++++++++++++++-------- 3 files changed, 65 insertions(+), 16 deletions(-) diff --git a/src/ess/imaging/masking.py b/src/ess/imaging/masking.py index e76e231..123cf04 100644 --- a/src/ess/imaging/masking.py +++ b/src/ess/imaging/masking.py @@ -6,11 +6,13 @@ import scipp as sc -from ..imaging.types import CorrectedDetector, MaskingRules, RawDetector, RunType +from ess.reduce.nexus.types import DetectorData + +from ..imaging.types import CorrectedDetector, MaskingRules, RunType def apply_masks( - da: RawDetector[RunType], masks: MaskingRules + da: DetectorData[RunType], masks: MaskingRules ) -> CorrectedDetector[RunType]: if not masks: return CorrectedDetector[RunType](da) diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index ab2b6c9..344902a 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -73,8 +73,8 @@ class CountsMasked(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Detector data with masks.""" -class RawDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): - """Raw detector counts read from file.""" +# class RawDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): +# """Raw detector counts read from file.""" class CorrectedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): @@ -97,4 +97,15 @@ class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Proton charge data for a run.""" +ProtonChargePath = NewType('ProtonChargePath', str) +"""Path to proton charge inside nexus file""" + + +class ExposureTime(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Exposure time for a run.""" + + +ExposureTimePath = NewType('ExposureTimePath', str) +"""Path to exposure time inside nexus file""" + del sc, sciline, NewType, TypeVar diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index 1bae090..e0e81c2 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -6,23 +6,41 @@ import sciline as sl import scipp as sc - -from ess.reduce.nexus import GenericNeXusWorkflow +import scippnexus as sx + +from ess.reduce.nexus import GenericNeXusWorkflow, load_component +from ess.reduce.nexus.types import ( + NeXusComponentLocationSpec, + NeXusDetectorName, + NeXusFileSpec, + NeXusName, +) from .. 
import imaging
 from ..imaging.types import (
     CorrectedDetector,
     DarkBackgroundRun,
+    ExposureTime,
+    ExposureTimePath,
     NormalizedDetector,
     OpenBeamRun,
     ProtonCharge,
+    ProtonChargePath,
     RunType,
     SampleRun,
 )


-class ExposureTime(sl.Scope[RunType, sc.DataArray], sc.DataArray):
-    """Exposure time for a run."""
+def load_proton_charge(
+    location: NeXusComponentLocationSpec[ProtonCharge, RunType],
+) -> ProtonCharge[RunType]:
+    return ProtonCharge[RunType](load_component(location, nx_class=sx.NXlog)["value"])
+
+
+def load_exposure_time(
+    location: NeXusComponentLocationSpec[ExposureTime, RunType],
+) -> ExposureTime[RunType]:
+    return ExposureTime[RunType](load_component(location, nx_class=sx.NXlog))


 def _compute_proton_charge_per_exposure(
@@ -109,10 +127,22 @@
     return NormalizedDetector[SampleRun](data / charge_per_frame)


-# providers = (
-#     normalize_by_proton_charge_orca,
-#     normalize_by_proton_charge_orca_sample,
-# )
+providers = (
+    load_exposure_time,
+    load_proton_charge,
+    normalize_by_proton_charge_orca,
+    normalize_by_proton_charge_orca_sample,
+)
+
+
+def default_parameters() -> dict:
+    return {
+        # ProtonChargePath: '/entry/neutron_prod_info/pulse_charge',
+        # ExposureTimePath: '/entry/instrument/detector/exposure_time',
+        NeXusDetectorName: 'orca_detector',
+        NeXusName[ProtonCharge]: '/entry/neutron_prod_info/pulse_charge',
+        NeXusName[ExposureTime]: '/entry/instrument/orca_detector/camera_exposure',
+    }


 def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline:
@@ -122,12 +152,18 @@

     wf = GenericNeXusWorkflow(
         run_types=[SampleRun, OpenBeamRun, DarkBackgroundRun],
-        monitor_types=[],
+        # Abusing the monitor_types to load proton charge and exposure time.
+        # How do we expand the list of possible components?
+ monitor_types=[ProtonCharge, ExposureTime], **kwargs, ) - for provider in (*imaging.normalization.providers, *imaging.masking.providers): + for provider in ( + *imaging.normalization.providers, + *imaging.masking.providers, + *providers, + ): wf.insert(provider) - wf.insert(normalize_by_proton_charge_orca) - wf.insert(normalize_by_proton_charge_orca_sample) + for key, param in default_parameters().items(): + wf[key] = param return wf From 4ae541478026060da58d11151b4b2daad2af338b Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Wed, 22 Oct 2025 15:03:23 +0200 Subject: [PATCH 07/24] cleanup --- src/ess/imaging/types.py | 7 ------- src/ess/tbl/orca.py | 2 -- 2 files changed, 9 deletions(-) diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index 344902a..7a5ec70 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -97,15 +97,8 @@ class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Proton charge data for a run.""" -ProtonChargePath = NewType('ProtonChargePath', str) -"""Path to proton charge inside nexus file""" - - class ExposureTime(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Exposure time for a run.""" -ExposureTimePath = NewType('ExposureTimePath', str) -"""Path to exposure time inside nexus file""" - del sc, sciline, NewType, TypeVar diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index e0e81c2..91f66a1 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -21,11 +21,9 @@ CorrectedDetector, DarkBackgroundRun, ExposureTime, - ExposureTimePath, NormalizedDetector, OpenBeamRun, ProtonCharge, - ProtonChargePath, RunType, SampleRun, ) From aca88d2901a2e207011d68f00dc85ada08c8b392 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 4 Nov 2025 11:48:09 +0100 Subject: [PATCH 08/24] use FluxNormalized to distinguish between normalized by proton-charge and normalized by open beam --- src/ess/imaging/normalization.py | 16 ++++++++-------- src/ess/imaging/types.py | 7 ++++--- src/ess/tbl/orca.py | 14 ++++++++------ 3 files changed, 20 insertions(+), 17 deletions(-) diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py index 343c72f..ff92a33 100644 --- a/src/ess/imaging/normalization.py +++ b/src/ess/imaging/normalization.py @@ -10,8 +10,8 @@ BackgroundSubtractedDetector, CorrectedDetector, DarkBackgroundRun, - IntensityDetector, - NormalizedDetector, + FluxNormalizedDetector, + NormalizedImage, OpenBeamRun, ProtonCharge, RunType, @@ -20,8 +20,8 @@ def subtract_background_sample( - data: NormalizedDetector[SampleRun], - background: NormalizedDetector[DarkBackgroundRun], + data: FluxNormalizedDetector[SampleRun], + background: FluxNormalizedDetector[DarkBackgroundRun], ) -> BackgroundSubtractedDetector[SampleRun]: """ Subtract background from proton-charge normalized sample data. @@ -37,8 +37,8 @@ def subtract_background_sample( def subtract_background_openbeam( - data: NormalizedDetector[OpenBeamRun], - background: NormalizedDetector[DarkBackgroundRun], + data: FluxNormalizedDetector[OpenBeamRun], + background: FluxNormalizedDetector[DarkBackgroundRun], ) -> BackgroundSubtractedDetector[OpenBeamRun]: """ Subtract background from proton-charge normalized open beam data. 
@@ -56,7 +56,7 @@ def subtract_background_openbeam( def sample_over_openbeam( sample: BackgroundSubtractedDetector[SampleRun], open_beam: BackgroundSubtractedDetector[OpenBeamRun], -) -> IntensityDetector: +) -> NormalizedImage: """ Divide background-subtracted sample data by background-subtracted open beam data, to obtain final normalized image. @@ -68,7 +68,7 @@ def sample_over_openbeam( open_beam: Open beam data to divide by (background subtracted). """ - return IntensityDetector(sample / open_beam) + return NormalizedImage(sample / open_beam) providers = ( diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index 7a5ec70..c465ed0 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -81,7 +81,7 @@ class CorrectedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Corrected detector counts with masking applied.""" -class NormalizedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): +class FluxNormalizedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Detector counts normalized to proton charge.""" @@ -89,8 +89,9 @@ class BackgroundSubtractedDetector(sciline.Scope[RunType, sc.DataArray], sc.Data """Detector counts with dark background subtracted.""" -class IntensityDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): - """Intensity at the detector normalized by open beam measurement.""" +NormalizedImage = NewType('NormalizedImage', sc.DataArray) +"""Final image: background-subtracted sample run divided by background-subtracted open +beam run.""" class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index 91f66a1..5179e6d 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -21,7 +21,7 @@ CorrectedDetector, DarkBackgroundRun, ExposureTime, - NormalizedDetector, + FluxNormalizedDetector, OpenBeamRun, ProtonCharge, RunType, @@ -38,7 +38,7 @@ def load_proton_charge( def load_exposure_time( location: NeXusComponentLocationSpec[ExposureTime, RunType], ) -> ExposureTime[RunType]: - return ExposureTime[RunType](load_component(location, nx_class=sx.NXlog)) + return ExposureTime[RunType](load_component(location, nx_class=sx.NXlog)["value"]) def _compute_proton_charge_per_exposure( @@ -72,7 +72,7 @@ def normalize_by_proton_charge_orca( data: CorrectedDetector[RunType], proton_charge: ProtonCharge[RunType], exposure_time: ExposureTime[RunType], -) -> NormalizedDetector[RunType]: +) -> FluxNormalizedDetector[RunType]: """ Normalize detector data by the proton charge (dark and open beam runs). We find the time stamps for the data, which mark the start of an exposure. @@ -95,14 +95,16 @@ def normalize_by_proton_charge_orca( data, proton_charge, exposure_time ) - return NormalizedDetector[RunType](data.sum('time') / charge_per_frame.sum('time')) + return FluxNormalizedDetector[RunType]( + data.sum('time') / charge_per_frame.sum('time') + ) def normalize_by_proton_charge_orca_sample( data: CorrectedDetector[SampleRun], proton_charge: ProtonCharge[SampleRun], exposure_time: ExposureTime[SampleRun], -) -> NormalizedDetector[SampleRun]: +) -> FluxNormalizedDetector[SampleRun]: """ Normalize sample run detector data by the proton charge. The handling of the SampleRun is different from the other runs: @@ -122,7 +124,7 @@ def normalize_by_proton_charge_orca_sample( ) # Here we preserve the time dimension of the sample data and the proton charge. 
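     # E.g. sample data with dims (time, dim_1, dim_2) divided by a charge per frame
     # with dims (time,) broadcasts the charge over the image dimensions, yielding
     # one flux-normalized image per frame.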
- return NormalizedDetector[SampleRun](data / charge_per_frame) + return FluxNormalizedDetector[SampleRun](data / charge_per_frame) providers = ( From 4332a55896caa233fd2691c091e7b154770a8f4d Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 4 Nov 2025 12:19:44 +0100 Subject: [PATCH 09/24] fix last parts of the workflow, adding uncertainty broadcast mode --- src/ess/imaging/normalization.py | 23 +++++++++++++++++------ src/ess/imaging/types.py | 3 +++ src/ess/tbl/orca.py | 6 ++++-- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py index ff92a33..7dc953e 100644 --- a/src/ess/imaging/normalization.py +++ b/src/ess/imaging/normalization.py @@ -4,24 +4,23 @@ Contains the providers for normalization. """ -import scipp as sc +from ess.reduce.uncertainty import broadcast_uncertainties from .types import ( BackgroundSubtractedDetector, - CorrectedDetector, DarkBackgroundRun, FluxNormalizedDetector, NormalizedImage, OpenBeamRun, - ProtonCharge, - RunType, SampleRun, + UncertaintyBroadcastMode, ) def subtract_background_sample( data: FluxNormalizedDetector[SampleRun], background: FluxNormalizedDetector[DarkBackgroundRun], + uncertainties: UncertaintyBroadcastMode, ) -> BackgroundSubtractedDetector[SampleRun]: """ Subtract background from proton-charge normalized sample data. @@ -32,8 +31,13 @@ def subtract_background_sample( Sample data to process (normalized to proton charge). background: Background (dark frame) data to subtract (normalized to proton charge). + uncertainties: + Mode to use when broadcasting uncertainties from background to data (data + typically has multiple frames, while background usually has only one frame). """ - return BackgroundSubtractedDetector[SampleRun](data - background) + return BackgroundSubtractedDetector[SampleRun]( + data - broadcast_uncertainties(background, prototype=data, mode=uncertainties) + ) def subtract_background_openbeam( @@ -56,6 +60,7 @@ def subtract_background_openbeam( def sample_over_openbeam( sample: BackgroundSubtractedDetector[SampleRun], open_beam: BackgroundSubtractedDetector[OpenBeamRun], + uncertainties: UncertaintyBroadcastMode, ) -> NormalizedImage: """ Divide background-subtracted sample data by background-subtracted open beam data, @@ -67,8 +72,14 @@ def sample_over_openbeam( Sample data to process (background subtracted). open_beam: Open beam data to divide by (background subtracted). + uncertainties: + Mode to use when broadcasting uncertainties from open beam to sample (sample + typically has multiple frames, while open beam usually has only one frame). 
""" - return NormalizedImage(sample / open_beam) + return NormalizedImage( + sample + / broadcast_uncertainties(open_beam, prototype=sample, mode=uncertainties) + ) providers = ( diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index f742a6a..bb95e37 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -9,6 +9,7 @@ from ess.reduce.nexus import types as reduce_t from ess.reduce.time_of_flight import types as tof_t +from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode # 1 TypeVars used to parametrize the generic parts of the workflow @@ -27,6 +28,8 @@ TimeOfFlightLookupTable = tof_t.TimeOfFlightLookupTable TimeOfFlightLookupTableFilename = tof_t.TimeOfFlightLookupTableFilename +UncertaintyBroadcastMode = _UncertaintyBroadcastMode + SampleRun = NewType('SampleRun', int) """Sample run; a run with a sample in the beam.""" diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index 5179e6d..06b3d11 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -38,7 +38,9 @@ def load_proton_charge( def load_exposure_time( location: NeXusComponentLocationSpec[ExposureTime, RunType], ) -> ExposureTime[RunType]: - return ExposureTime[RunType](load_component(location, nx_class=sx.NXlog)["value"]) + return ExposureTime[RunType]( + load_component(location, nx_class=sx.NXlog)["value"].squeeze() + ) def _compute_proton_charge_per_exposure( @@ -53,7 +55,7 @@ def _compute_proton_charge_per_exposure( # do the normalization without computing time of flight? t = data.coords['time'] - exp = exposure_time.to(unit=t.unit) + exp = exposure_time.data.to(unit=t.unit) # The following assumes that the different between successive frames (time stamps) # is larger than the exposure time. We need to check that this is indeed the case. 
if (t[1:] - t[:-1]).min() < exp: From 49d1f9c062ef19717bd78cf54f1ded6c569ec0bb Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 4 Nov 2025 12:20:39 +0100 Subject: [PATCH 10/24] remove unused import --- src/ess/tbl/orca.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index 06b3d11..fb1bf8a 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -12,7 +12,6 @@ from ess.reduce.nexus.types import ( NeXusComponentLocationSpec, NeXusDetectorName, - NeXusFileSpec, NeXusName, ) From 1a18dde8328e77ccadd0424d87307874e16b7bab Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 12:25:51 +0100 Subject: [PATCH 11/24] add notebook that made tbl images from ymir data --- tools/make-tbl-images-from-ymir.ipynb | 259 ++++++++++++++++++++++++++ 1 file changed, 259 insertions(+) create mode 100644 tools/make-tbl-images-from-ymir.ipynb diff --git a/tools/make-tbl-images-from-ymir.ipynb b/tools/make-tbl-images-from-ymir.ipynb new file mode 100644 index 0000000..7c1ff22 --- /dev/null +++ b/tools/make-tbl-images-from-ymir.ipynb @@ -0,0 +1,259 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Make TBL dark, open beam and sample images from Ymir data\n", + "\n", + "This notebook creates files for TBL that contain images in the Orca detector file entry.\n", + "We used a TBL file written by ECDC as a template (`857127_00000212.hdf`) and replaced the orca detector data with images recorded at Ymir.\n", + "\n", + "The Ymir data was a single file containing dark frame, open beam, and sample data.\n", + "We split this into 3 separate datasets and files, as this is the way TBL would like to operate (at least initially).\n", + "\n", + "We then add an arbitrary exposure time entry in the file for the images.\n", + "Finally, we generate a fake proton-charge log and place it inside the `neutron_prod_info` group.\n", + "The proton charge is a time series from the first frame time to the last frame time + exposure time, where the value is randomly oscillating around 1." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import h5py as h5\n", + "import numpy as np\n", + "import scipp as sc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ess.ymir.data import ymir_lego_images_path" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Ymir data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "path = 'entry/instrument/histogram_mode_detectors/orca/data/'\n", + "\n", + "with h5.File(ymir_lego_images_path()) as f:\n", + " lego_data = f[path+'value'][()]\n", + " lego_time = f[path+'time'][()]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Split into dark, open beam, and sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dark_data = lego_data[:5, ...]\n", + "dark_time = lego_time[:5, ...]\n", + "\n", + "ob_data = lego_data[5:10, ...]\n", + "ob_time = lego_time[5:10, ...]\n", + "\n", + "sample_data = lego_data[10:, ...]\n", + "sample_time = lego_time[10:, ...]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create fake proton charge" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exposure_time = int(5e8) # 0.5s (made up)\n", + "period = 1e9/14\n", + "\n", + "dark_charge_time = np.arange(float(dark_time[0]), float(dark_time[-1] + exposure_time), period).astype('uint64')\n", + "dark_charge_value = np.random.uniform(0.99, 1.01, size=len(dark_charge_time)).astype('float64')\n", + "\n", + "ob_charge_time = np.arange(float(ob_time[0]), float(ob_time[-1] + exposure_time), period).astype('uint64')\n", + "ob_charge_value = np.random.uniform(0.99, 1.01, size=len(ob_charge_time)).astype('float64')\n", + "\n", + "sample_charge_time = np.arange(float(sample_time[0]), float(sample_time[-1] + exposure_time), period).astype('uint64')\n", + "sample_charge_value = np.random.uniform(0.99, 1.01, size=len(sample_charge_time)).astype('float64')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Make pixel offsets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "npix = dark_data.shape[-1]\n", + "\n", + "x_pixel_offset = sc.linspace('dim_1', -0.1, 0.1, npix, unit='m')\n", + "y_pixel_offset = sc.linspace('dim_2', -0.1, 0.1, npix, unit='m')\n", + "x_pixel_offset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create new files from template" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def replace_dataset(entry, name, values):\n", + " attrs = dict(entry[name].attrs)\n", + " del entry[name]\n", + " dset = entry.create_dataset(name, data=values)\n", + " dset.attrs.update(attrs)\n", + "\n", + "def make_new_file(template_nexus_file, outfile, data, proton_charge):\n", + " import shutil\n", + " shutil.copyfile(template_nexus_file, outfile)\n", + "\n", + " # detector_path = 'entry/instrument/histogram_mode_detectors/orca' # ODIN\n", + " detector_path = 'entry/instrument/orca_detector/' # TBL\n", + " proton_charge_path = 'entry/neutron_prod_info/pulse_charge'\n", + " \n", + " with h5.File(outfile, \"r+\") as f:\n", + "\n", + " detector_data = f[detector_path+\"data\"]\n", + " 
replace_dataset(detector_data, name=\"value\", values=data[\"value\"])\n", + " replace_dataset(detector_data, name=\"time\", values=data[\"time\"])\n", + "\n", + " detector = f[detector_path]\n", + " detector.copy('intensifier', detector, 'camera_exposure')\n", + " exp = f[detector_path+\"camera_exposure\"]\n", + " replace_dataset(exp, name=\"value\", values=np.array([exposure_time]))\n", + " replace_dataset(exp, name=\"average_value\", values=exposure_time)\n", + " replace_dataset(exp, name=\"minimum_value\", values=exposure_time)\n", + " replace_dataset(exp, name=\"maximum_value\", values=exposure_time)\n", + " f[detector_path+\"camera_exposure/value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/average_value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/minimum_value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/maximum_value\"].attrs['units'] = 'ns'\n", + "\n", + " xoff = detector.create_dataset('x_pixel_offset', data=x_pixel_offset.values)\n", + " xoff.attrs['units'] = str(x_pixel_offset.unit)\n", + " xoff.attrs['axis'] = 1\n", + " yoff = detector.create_dataset('y_pixel_offset', data=y_pixel_offset.values)\n", + " yoff.attrs['units'] = str(y_pixel_offset.unit)\n", + " yoff.attrs['axis'] = 2\n", + " detector.attrs['axes'] = ['dim_2', 'dim_1']\n", + "\n", + " pcharge = f[proton_charge_path]\n", + " replace_dataset(pcharge, name=\"value\", values=proton_charge[\"value\"])\n", + " replace_dataset(pcharge, name=\"time\", values=proton_charge[\"time\"])\n", + " replace_dataset(pcharge, name=\"average_value\", values=proton_charge[\"value\"].mean())\n", + " replace_dataset(pcharge, name=\"minimum_value\", values=proton_charge[\"value\"].min())\n", + " replace_dataset(pcharge, name=\"maximum_value\", values=proton_charge[\"value\"].max())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "template_nexus_file = \"857127_00000212.hdf\"\n", + "\n", + "# Dark run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_dark_run.hdf\",\n", + " data={\"value\": dark_data, \"time\": dark_time},\n", + " proton_charge={\"value\": dark_charge_value, \"time\": dark_charge_time}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Openbeam run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_openbeam_run.hdf\",\n", + " data={\"value\": ob_data, \"time\": ob_time},\n", + " proton_charge={\"value\": ob_charge_value, \"time\": ob_charge_time}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sample run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_sample_run.hdf\",\n", + " data={\"value\": sample_data, \"time\": sample_time},\n", + " proton_charge={\"value\": sample_charge_value, \"time\": sample_charge_time}\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} From 
b73f6e4b8cae1a453aca1983f66cb8285c7410ac Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 13:33:03 +0100 Subject: [PATCH 12/24] update data registry with new lego files --- src/ess/tbl/data.py | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/src/ess/tbl/data.py b/src/ess/tbl/data.py index b9a2c6d..444388c 100644 --- a/src/ess/tbl/data.py +++ b/src/ess/tbl/data.py @@ -8,13 +8,16 @@ _registry = make_registry( 'ess/tbl', - version="1", + version="2", files={ "tbl_sample_data_2025-03.hdf": "md5:12db6bc06721278b3abe47992eac3e77", "TBL-tof-lookup-table-no-choppers.h5": "md5:8bc98fac0ee64fc8f5decf509c75bafe", 'tbl-orca-focussing.hdf.zip': Entry( alg='md5', chk='f365acd9ea45dd205c0b9398d163cfa4', unzip=True ), + "ymir_lego_dark_run.hdf": "md5:c0ed089dd7663986042e29fb47514130", + "ymir_lego_openbeam_run.hdf": "md5:00375becd54d2ed3be096413dc30883c", + "ymir_lego_sample_run.hdf": "md5:ae56a335cf3d4e87ef090ec4e51da69c", }, ) @@ -44,3 +47,36 @@ def tbl_orca_focussing_data() -> pathlib.Path: """ return _registry.get_path('tbl-orca-focussing.hdf.zip') + + +def tbl_lego_dark_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO dark run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. + The dark run data was extracted from the YMIR LEGO run (first 5 frames). + """ + + return _registry.get_path("ymir_lego_dark_run.hdf") + + +def ymir_lego_openbeam_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO open beam run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. + The open beam run data was extracted from the YMIR LEGO run (frames 5 to 10). + """ + + return _registry.get_path("ymir_lego_openbeam_run.hdf") + + +def ymir_lego_sample_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO sample run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. + The sample run data was extracted from the YMIR LEGO run (frames 10 and onward). + """ + + return _registry.get_path("ymir_lego_sample_run.hdf") From 75bd8396a0e02a9c10b5ab986d470af9754269e7 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:25:55 +0100 Subject: [PATCH 13/24] add image normalization workflow tests --- src/ess/tbl/data.py | 4 +- tests/tbl/orca_test.py | 110 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 112 insertions(+), 2 deletions(-) create mode 100644 tests/tbl/orca_test.py diff --git a/src/ess/tbl/data.py b/src/ess/tbl/data.py index 444388c..d01c58d 100644 --- a/src/ess/tbl/data.py +++ b/src/ess/tbl/data.py @@ -60,7 +60,7 @@ def tbl_lego_dark_run() -> pathlib.Path: return _registry.get_path("ymir_lego_dark_run.hdf") -def ymir_lego_openbeam_run() -> pathlib.Path: +def tbl_lego_openbeam_run() -> pathlib.Path: """ Return the path to the TBL LEGO open beam run HDF5 file, created from the YMIR data. This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. 
@@ -71,7 +71,7 @@ def ymir_lego_openbeam_run() -> pathlib.Path: return _registry.get_path("ymir_lego_openbeam_run.hdf") -def ymir_lego_sample_run() -> pathlib.Path: +def tbl_lego_sample_run() -> pathlib.Path: """ Return the path to the TBL LEGO sample run HDF5 file, created from the YMIR data. This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. diff --git a/tests/tbl/orca_test.py b/tests/tbl/orca_test.py new file mode 100644 index 0000000..f784b65 --- /dev/null +++ b/tests/tbl/orca_test.py @@ -0,0 +1,110 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) + +import pytest +import sciline as sl +import scipp as sc +import scippnexus as sx +from scipp.testing import assert_identical + +import ess.tbl.data # noqa: F401 +from ess import imaging, tbl +from ess.imaging.types import ( + BackgroundSubtractedDetector, + CorrectedDetector, + DarkBackgroundRun, + Filename, + FluxNormalizedDetector, + MaskingRules, + NeXusDetectorName, + NormalizedImage, + OpenBeamRun, + Position, + ProtonCharge, + RawDetector, + SampleRun, + UncertaintyBroadcastMode, +) +from ess.tbl import orca + + +@pytest.fixture +def workflow() -> sl.Pipeline: + """ + Workflow for normalizing TBL Orca images. + """ + + wf = orca.OrcaNormalizedImagesWorkflow() + wf[Filename[SampleRun]] = tbl.data.tbl_lego_sample_run() + wf[Filename[DarkBackgroundRun]] = tbl.data.tbl_lego_dark_run() + wf[Filename[OpenBeamRun]] = tbl.data.tbl_lego_openbeam_run() + wf[MaskingRules] = {} + wf[NeXusDetectorName] = 'orca_detector' + wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound + wf[Position[sx.NXsample, SampleRun]] = sc.vector([0.0, 0.0, 0.0], unit='m') + return wf + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_loads_raw_data(workflow, run): + da = workflow.compute(RawDetector[run]) + assert "position" in da.coords + assert "time" in da.coords + assert da.ndim == 3 + assert "time" in da.dims + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_loads_proton_charge(workflow, run): + pc = workflow.compute(ProtonCharge[run]) + assert "time" in pc.coords + assert "time" in pc.dims + assert pc.unit == "uC" + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_applies_masks(workflow, run): + workflow[MaskingRules] = { + 'y_pixel_offset': lambda x: x > sc.scalar(0.082, unit='m') + } + da = workflow.compute(RawDetector[run]) + masked_da = workflow.compute(CorrectedDetector[run]) + assert 'y_pixel_offset' in masked_da.masks + assert da.sum().value > masked_da.sum().value + + +@pytest.mark.parametrize("run", [OpenBeamRun, DarkBackgroundRun]) +def test_workflow_normalizes_by_proton_charge(workflow, run): + da = workflow.compute(FluxNormalizedDetector[run]) + # Dark and open beam runs have been averaged over time dimension + assert da.ndim == 2 + assert "time" not in da.dims + # TODO: should it be "counts / uC"? + assert da.unit == "1 / uC" + + +def test_workflow_normalizes_sample_by_proton_charge(workflow): + da = workflow.compute(FluxNormalizedDetector[SampleRun]) + # Sample run retains time dimension + assert da.ndim == 3 + assert "time" in da.dims + # TODO: should it be "counts / uC"? 
+ assert da.unit == "1 / uC" + + +@pytest.mark.parametrize("run", [OpenBeamRun, SampleRun]) +def test_workflow_subtracts_dark_background(workflow, run): + background = workflow.compute(FluxNormalizedDetector[DarkBackgroundRun]) + before = workflow.compute(FluxNormalizedDetector[run]) + after = workflow.compute(BackgroundSubtractedDetector[run]) + + assert_identical(sc.values(after), sc.values(before) - sc.values(background)) + + +def test_workflow_computes_normalized_image(workflow): + sample = workflow.compute(BackgroundSubtractedDetector[SampleRun]) + open_beam = workflow.compute(BackgroundSubtractedDetector[OpenBeamRun]) + normalized = workflow.compute(NormalizedImage) + + assert_identical(sc.values(normalized), sc.values(sample) / sc.values(open_beam)) + assert normalized.unit == sc.units.dimensionless From c7fd7b05c0168ec88717ed49bcb0357dcaa7ac40 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:40:26 +0100 Subject: [PATCH 14/24] add notebook to docs --- docs/tbl/index.md | 1 + docs/tbl/orca-image-normalization.ipynb | 173 ++++++++++++++++++++++++ 2 files changed, 174 insertions(+) create mode 100644 docs/tbl/orca-image-normalization.ipynb diff --git a/docs/tbl/index.md b/docs/tbl/index.md index 691a1e0..9e5ac93 100644 --- a/docs/tbl/index.md +++ b/docs/tbl/index.md @@ -6,5 +6,6 @@ maxdepth: 1 --- tbl-data-reduction +orca-image-normalization tbl-make-tof-lookup-table ``` diff --git a/docs/tbl/orca-image-normalization.ipynb b/docs/tbl/orca-image-normalization.ipynb new file mode 100644 index 0000000..1d3e0d4 --- /dev/null +++ b/docs/tbl/orca-image-normalization.ipynb @@ -0,0 +1,173 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a22169ac-8db4-4326-b7c2-aae132a0c250", + "metadata": {}, + "source": [ + "# TBL: Orca image normalization workflow\n", + "\n", + "This notebook shows how to use the workflow to compute normalized images recorded by the Orca detector on the TBL instrument." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8359064e-6768-4a61-aa48-ff35c06a8800", + "metadata": {}, + "outputs": [], + "source": [ + "from ess import imaging\n", + "from ess.tbl import orca\n", + "from ess.imaging.types import *\n", + "import scipp as sc\n", + "import plopp as pp\n", + "import scippnexus as sx\n", + "\n", + "%matplotlib widget" + ] + }, + { + "cell_type": "markdown", + "id": "863f42c6-0663-496a-905c-40953b764c1b", + "metadata": {}, + "source": [ + "## Workflow setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aad744e6-f30c-40c3-b169-a1fe8d8fa1ca", + "metadata": {}, + "outputs": [], + "source": [ + "wf = orca.OrcaNormalizedImagesWorkflow()\n", + "\n", + "wf[Filename[SampleRun]] = \"ymir_lego_sample_run.hdf\"\n", + "wf[Filename[DarkBackgroundRun]] = \"ymir_lego_dark_run.hdf\"\n", + "wf[Filename[OpenBeamRun]] = \"ymir_lego_openbeam_run.hdf\"\n", + "wf[NeXusDetectorName] = 'orca_detector'\n", + "\n", + "wf[MaskingRules] = {} # No masks to begin with\n", + "wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8aeda1a-ab46-4a96-91c8-6de93a104011", + "metadata": {}, + "outputs": [], + "source": [ + "wf.visualize(NormalizedImage, compact=True, graph_attr={\"rankdir\": \"LR\"})" + ] + }, + { + "cell_type": "markdown", + "id": "b4118540-5947-46d9-8ebb-0a63fa572c4a", + "metadata": {}, + "source": [ + "## Run the workflow\n", + "\n", + "We compute the final normalized image:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c35b2155-adee-4a4f-804e-cfdc93983524", + "metadata": {}, + "outputs": [], + "source": [ + "image = wf.compute(NormalizedImage)\n", + "image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f868995-e9bf-48ab-8d23-5fc43d703c3b", + "metadata": {}, + "outputs": [], + "source": [ + "pp.slicer(image)" + ] + }, + { + "cell_type": "markdown", + "id": "7f428da9-527e-4d63-95bf-f691b15f69f0", + "metadata": {}, + "source": [ + "## Adding masks\n", + "\n", + "If we want to mask some part of the image, we update the masking rules.\n", + "For example, here we mask the upper part of the image:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "822e8919-b6cf-473f-8aba-d6d6c08aca12", + "metadata": {}, + "outputs": [], + "source": [ + "wf[MaskingRules] = {'y_pixel_offset': lambda x: x > sc.scalar(0.082, unit='m')}\n", + "\n", + "pp.slicer(wf.compute(NormalizedImage))" + ] + }, + { + "cell_type": "markdown", + "id": "032852b7-82f0-43d9-88a2-40719b85d8a0", + "metadata": {}, + "source": [ + "## Intermediate results\n", + "\n", + "We can also inspect intermediate results, which is useful for debugging:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6ba8f43-84b4-4bbc-a300-522c952a38df", + "metadata": {}, + "outputs": [], + "source": [ + "results = wf.compute([\n", + " RawDetector[SampleRun],\n", + " CorrectedDetector[SampleRun],\n", + " BackgroundSubtractedDetector[SampleRun]\n", + "])\n", + "\n", + "fig = pp.tiled(2, 2, hspace=0.3, wspace=0.3)\n", + "fig[0, 0] = results[RawDetector[SampleRun]]['time', 0].plot(title='Raw data')\n", + "fig[0, 1] = results[CorrectedDetector[SampleRun]]['time', 0].plot(title='Masks applied')\n", + "fig[1, 0] = results[BackgroundSubtractedDetector[SampleRun]]['time', 0].plot(title='Background subtracted')\n", + "fig[1, 1] = image['time', 0].plot(title='Final image')\n", + "fig" + ] + } + ], + "metadata": { + 
"kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 9b88bc53ac9071b1b68d426359d6f4cf001913e1 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:42:42 +0100 Subject: [PATCH 15/24] formatting --- docs/tbl/orca-image-normalization.ipynb | 26 ++++++++++++------------- tests/tbl/orca_test.py | 2 +- tools/make-tbl-images-from-ymir.ipynb | 2 +- 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/docs/tbl/orca-image-normalization.ipynb b/docs/tbl/orca-image-normalization.ipynb index 1d3e0d4..c578e4a 100644 --- a/docs/tbl/orca-image-normalization.ipynb +++ b/docs/tbl/orca-image-normalization.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "a22169ac-8db4-4326-b7c2-aae132a0c250", + "id": "0", "metadata": {}, "source": [ "# TBL: Orca image normalization workflow\n", @@ -13,23 +13,21 @@ { "cell_type": "code", "execution_count": null, - "id": "8359064e-6768-4a61-aa48-ff35c06a8800", + "id": "1", "metadata": {}, "outputs": [], "source": [ - "from ess import imaging\n", "from ess.tbl import orca\n", "from ess.imaging.types import *\n", "import scipp as sc\n", "import plopp as pp\n", - "import scippnexus as sx\n", "\n", "%matplotlib widget" ] }, { "cell_type": "markdown", - "id": "863f42c6-0663-496a-905c-40953b764c1b", + "id": "2", "metadata": {}, "source": [ "## Workflow setup" @@ -38,7 +36,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aad744e6-f30c-40c3-b169-a1fe8d8fa1ca", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -56,7 +54,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d8aeda1a-ab46-4a96-91c8-6de93a104011", + "id": "4", "metadata": {}, "outputs": [], "source": [ @@ -65,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "b4118540-5947-46d9-8ebb-0a63fa572c4a", + "id": "5", "metadata": {}, "source": [ "## Run the workflow\n", @@ -76,7 +74,7 @@ { "cell_type": "code", "execution_count": null, - "id": "c35b2155-adee-4a4f-804e-cfdc93983524", + "id": "6", "metadata": {}, "outputs": [], "source": [ @@ -87,7 +85,7 @@ { "cell_type": "code", "execution_count": null, - "id": "1f868995-e9bf-48ab-8d23-5fc43d703c3b", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -96,7 +94,7 @@ }, { "cell_type": "markdown", - "id": "7f428da9-527e-4d63-95bf-f691b15f69f0", + "id": "8", "metadata": {}, "source": [ "## Adding masks\n", @@ -108,7 +106,7 @@ { "cell_type": "code", "execution_count": null, - "id": "822e8919-b6cf-473f-8aba-d6d6c08aca12", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -119,7 +117,7 @@ }, { "cell_type": "markdown", - "id": "032852b7-82f0-43d9-88a2-40719b85d8a0", + "id": "10", "metadata": {}, "source": [ "## Intermediate results\n", @@ -130,7 +128,7 @@ { "cell_type": "code", "execution_count": null, - "id": "e6ba8f43-84b4-4bbc-a300-522c952a38df", + "id": "11", "metadata": {}, "outputs": [], "source": [ diff --git a/tests/tbl/orca_test.py b/tests/tbl/orca_test.py index f784b65..425eb00 100644 --- a/tests/tbl/orca_test.py +++ b/tests/tbl/orca_test.py @@ -8,7 +8,7 @@ from scipp.testing import assert_identical import ess.tbl.data # noqa: F401 -from ess import imaging, tbl +from ess import tbl from ess.imaging.types import ( 
BackgroundSubtractedDetector, CorrectedDetector, diff --git a/tools/make-tbl-images-from-ymir.ipynb b/tools/make-tbl-images-from-ymir.ipynb index 7c1ff22..d30aeb4 100644 --- a/tools/make-tbl-images-from-ymir.ipynb +++ b/tools/make-tbl-images-from-ymir.ipynb @@ -152,7 +152,7 @@ " # detector_path = 'entry/instrument/histogram_mode_detectors/orca' # ODIN\n", " detector_path = 'entry/instrument/orca_detector/' # TBL\n", " proton_charge_path = 'entry/neutron_prod_info/pulse_charge'\n", - " \n", + "\n", " with h5.File(outfile, \"r+\") as f:\n", "\n", " detector_data = f[detector_path+\"data\"]\n", From 591ded1928a7a1d26bf668c92fcf87e7decd12e4 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:43:23 +0100 Subject: [PATCH 16/24] require essreduce>=25.11.1 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 453643a..9102ab1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "scippneutron>=24.12.0", "scippnexus>=23.11.1", "tifffile>=2024.7.2", - "essreduce>=25.11.0", + "essreduce>=25.11.1", "scitiff>=25.7", ] From c76cfcdd33e0d3abc12adc343e56fd4b8450724c Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:45:13 +0100 Subject: [PATCH 17/24] update deps --- requirements/base.in | 2 +- requirements/base.txt | 12 ++++++------ requirements/basetest.txt | 4 ++-- requirements/nightly.txt | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements/base.in b/requirements/base.in index ff57099..b21641f 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -10,5 +10,5 @@ scipp>=25.4.0 scippneutron>=24.12.0 scippnexus>=23.11.1 tifffile>=2024.7.2 -essreduce>=25.11.0 +essreduce>=25.11.1 scitiff>=25.7 diff --git a/requirements/base.txt b/requirements/base.txt index 63dd3ee..f55add7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:da99ba40cef426287cd34dd1a2b56aefd0cb24e6 +# SHA1:793b699242696bedfa9d4b24286c04a03b149091 # # This file was generated by pip-compile-multi. 
# To update, run: @@ -25,7 +25,7 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 +dask==2025.11.0 # via -r base.in decorator==5.2.1 # via ipython @@ -33,7 +33,7 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.11.0 +essreduce==25.11.1 # via -r base.in executing==2.2.1 # via stack-data @@ -57,7 +57,7 @@ ipydatawidgets==4.3.5 # via pythreejs ipympl==0.9.8 # via plopp -ipython==9.6.0 +ipython==9.7.0 # via # ipympl # ipywidgets @@ -135,11 +135,11 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pydantic==2.12.3 +pydantic==2.12.4 # via # scippneutron # scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via diff --git a/requirements/basetest.txt b/requirements/basetest.txt index cee4182..860519a 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -60,9 +60,9 @@ pluggy==1.6.0 # via pytest pooch==1.8.2 # via -r basetest.in -pydantic==2.12.3 +pydantic==2.12.4 # via scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via pytest diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 93e125c..78f52cf 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -28,7 +28,7 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 +dask==2025.11.0 # via -r nightly.in dnspython==2.8.0 # via email-validator @@ -73,7 +73,7 @@ matplotlib==3.10.7 # plopp mpltoolbox==25.10.0 # via scippneutron -networkx==3.5 +networkx==3.6rc0 # via cyclebane numpy==2.3.4 # via @@ -106,11 +106,11 @@ pluggy==1.6.0 # via pytest pooch==1.8.2 # via -r nightly.in -pydantic==2.12.3 +pydantic==2.12.4 # via # scippneutron # scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via pytest From 0a401d5cebbd14ce8b60934e736ea8c4540fde9d Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 14:49:29 +0100 Subject: [PATCH 18/24] remove commented code --- src/ess/tbl/orca.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index fb1bf8a..ce66486 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -138,8 +138,6 @@ def normalize_by_proton_charge_orca_sample( def default_parameters() -> dict: return { - # ProtonChargePath: '/entry/neutron_prod_info/pulse_charge', - # ExposureTimePath: '/entry/instrument/detector/exposure_time', NeXusDetectorName: 'orca_detector', NeXusName[ProtonCharge]: '/entry/neutron_prod_info/pulse_charge', NeXusName[ExposureTime]: '/entry/instrument/orca_detector/camera_exposure', From 62f3d8d7b1df4ff3bc95ac6b6348aa7767e15955 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Fri, 7 Nov 2025 21:58:58 +0100 Subject: [PATCH 19/24] fix file paths in notebook --- docs/tbl/orca-image-normalization.ipynb | 11 ++++++----- src/ess/tbl/__init__.py | 2 ++ 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/tbl/orca-image-normalization.ipynb b/docs/tbl/orca-image-normalization.ipynb index c578e4a..19abb37 100644 --- a/docs/tbl/orca-image-normalization.ipynb +++ b/docs/tbl/orca-image-normalization.ipynb @@ -17,7 +17,8 @@ "metadata": {}, "outputs": [], "source": [ - "from ess.tbl import orca\n", + "import ess.tbl.data # noqa: F401\n", + "from ess import tbl\n", "from ess.imaging.types import *\n", "import scipp as sc\n", "import plopp as pp\n", @@ -40,11 +41,11 @@ "metadata": {}, "outputs": [], "source": [ - "wf = orca.OrcaNormalizedImagesWorkflow()\n", + "wf = 
tbl.OrcaNormalizedImagesWorkflow()\n", "\n", - "wf[Filename[SampleRun]] = \"ymir_lego_sample_run.hdf\"\n", - "wf[Filename[DarkBackgroundRun]] = \"ymir_lego_dark_run.hdf\"\n", - "wf[Filename[OpenBeamRun]] = \"ymir_lego_openbeam_run.hdf\"\n", + "wf[Filename[SampleRun]] = tbl.data.tbl_lego_sample_run()\n", + "wf[Filename[DarkBackgroundRun]] = tbl.data.tbl_lego_dark_run()\n", + "wf[Filename[OpenBeamRun]] = tbl.data.tbl_lego_openbeam_run()\n", "wf[NeXusDetectorName] = 'orca_detector'\n", "\n", "wf[MaskingRules] = {} # No masks to begin with\n", diff --git a/src/ess/tbl/__init__.py b/src/ess/tbl/__init__.py index fe5b9d4..508c027 100644 --- a/src/ess/tbl/__init__.py +++ b/src/ess/tbl/__init__.py @@ -5,6 +5,7 @@ import importlib.metadata from . import orca +from .orca import OrcaNormalizedImagesWorkflow from .workflow import TblWorkflow, default_parameters try: @@ -15,6 +16,7 @@ del importlib __all__ = [ + "OrcaNormalizedImagesWorkflow", "TblWorkflow", "default_parameters", "orca", From 20b610caf31bd601db9c6917aaf8a05978cbf308 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Tue, 11 Nov 2025 11:41:28 +0100 Subject: [PATCH 20/24] use new component_types constraints --- src/ess/tbl/orca.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index ce66486..da8e9ce 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -151,9 +151,8 @@ def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline: wf = GenericNeXusWorkflow( run_types=[SampleRun, OpenBeamRun, DarkBackgroundRun], - # Abusing the monitor_types to load proton charge and exposure time. - # How to we expand the list of possible components? - monitor_types=[ProtonCharge, ExposureTime], + monitor_types=[], + component_types=[ProtonCharge, ExposureTime], **kwargs, ) From bc0c9cd4c2747d28a98165f7203257febc99f503 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Wed, 19 Nov 2025 15:02:05 +0100 Subject: [PATCH 21/24] use load_from_path from most recent essreduce --- docs/tbl/orca-image-normalization.ipynb | 4 ++-- src/ess/tbl/orca.py | 26 ++++++++++++++++++------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/docs/tbl/orca-image-normalization.ipynb b/docs/tbl/orca-image-normalization.ipynb index 19abb37..9e248c7 100644 --- a/docs/tbl/orca-image-normalization.ipynb +++ b/docs/tbl/orca-image-normalization.ipynb @@ -90,7 +90,7 @@ "metadata": {}, "outputs": [], "source": [ - "pp.slicer(image)" + "pp.slicer(image, autoscale=False)" ] }, { @@ -113,7 +113,7 @@ "source": [ "wf[MaskingRules] = {'y_pixel_offset': lambda x: x > sc.scalar(0.082, unit='m')}\n", "\n", - "pp.slicer(wf.compute(NormalizedImage))" + "pp.slicer(wf.compute(NormalizedImage), autoscale=False)" ] }, { diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index da8e9ce..d0eb8f6 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -8,10 +8,11 @@ import scipp as sc import scippnexus as sx -from ess.reduce.nexus import GenericNeXusWorkflow, load_component +from ess.reduce.nexus import GenericNeXusWorkflow, load_from_path from ess.reduce.nexus.types import ( - NeXusComponentLocationSpec, NeXusDetectorName, + NeXusFileSpec, + NeXusLocationSpec, NeXusName, ) @@ -29,16 +30,28 @@ def load_proton_charge( - location: NeXusComponentLocationSpec[ProtonCharge, RunType], + file: NeXusFileSpec[RunType], path: NeXusName[ProtonCharge[RunType]] ) -> ProtonCharge[RunType]: - return ProtonCharge[RunType](load_component(location, nx_class=sx.NXlog)["value"]) + # Note that putting '/value' at the end of the 'path' in 
the default_parameters + # yields different results as it can return a Variable instead of a DataArray, + # depending on the contents of the NeXus file. + return ProtonCharge[RunType]( + load_from_path(NeXusLocationSpec(filename=file.value, component_name=path))[ + "value" + ] + ) def load_exposure_time( - location: NeXusComponentLocationSpec[ExposureTime, RunType], + file: NeXusFileSpec[RunType], path: NeXusName[ExposureTime[RunType]] ) -> ExposureTime[RunType]: + # Note that putting '/value' at the end of the 'path' in the default_parameters + # yields different results as it can return a Variable instead of a DataArray, + # depending on the contents of the NeXus file. return ExposureTime[RunType]( - load_component(location, nx_class=sx.NXlog)["value"].squeeze() + load_from_path(NeXusLocationSpec(filename=file.value, component_name=path))[ + "value" + ].squeeze() ) @@ -152,7 +165,6 @@ def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline: wf = GenericNeXusWorkflow( run_types=[SampleRun, OpenBeamRun, DarkBackgroundRun], monitor_types=[], - component_types=[ProtonCharge, ExposureTime], **kwargs, ) From 086e9b62228f97aa9f9038b278454dd06cc76dbe Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Wed, 19 Nov 2025 15:02:45 +0100 Subject: [PATCH 22/24] remove unused import --- src/ess/tbl/orca.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py index d0eb8f6..30a5f52 100644 --- a/src/ess/tbl/orca.py +++ b/src/ess/tbl/orca.py @@ -6,7 +6,6 @@ import sciline as sl import scipp as sc -import scippnexus as sx from ess.reduce.nexus import GenericNeXusWorkflow, load_from_path from ess.reduce.nexus.types import ( From e6e6b0eeca415c02b232e4ad823988126f2efda7 Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Wed, 19 Nov 2025 15:03:20 +0100 Subject: [PATCH 23/24] bump essreduce version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9102ab1..8bb1ce2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "scippneutron>=24.12.0", "scippnexus>=23.11.1", "tifffile>=2024.7.2", - "essreduce>=25.11.1", + "essreduce>=25.11.2", "scitiff>=25.7", ] From d9a06bd2c401e5fd52702e0013878e44b9870c9b Mon Sep 17 00:00:00 2001 From: Neil Vaytet Date: Wed, 19 Nov 2025 15:10:30 +0100 Subject: [PATCH 24/24] update deps --- requirements/base.in | 2 +- requirements/base.txt | 17 ++++++++--------- requirements/basetest.txt | 19 +++++++++---------- requirements/ci.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/docs.txt | 10 ++++++---- requirements/nightly.txt | 15 ++++++++------- requirements/static.txt | 2 +- 8 files changed, 37 insertions(+), 36 deletions(-) diff --git a/requirements/base.in b/requirements/base.in index b21641f..9e1a2c5 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -10,5 +10,5 @@ scipp>=25.4.0 scippneutron>=24.12.0 scippnexus>=23.11.1 tifffile>=2024.7.2 -essreduce>=25.11.1 +essreduce>=25.11.2 scitiff>=25.7 diff --git a/requirements/base.txt b/requirements/base.txt index f55add7..057b951 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:793b699242696bedfa9d4b24286c04a03b149091 +# SHA1:a1100845ace4b19ad8a759324efbe63c69022f7b # # This file was generated by pip-compile-multi. 
# To update, run: @@ -7,13 +7,13 @@ # annotated-types==0.7.0 # via pydantic -asttokens==3.0.0 +asttokens==3.0.1 # via stack-data attrs==25.4.0 # via # jsonschema # referencing -click==8.3.0 +click==8.3.1 # via dask cloudpickle==3.1.2 # via dask @@ -33,7 +33,7 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.11.1 +essreduce==25.11.2 # via -r base.in executing==2.2.1 # via stack-data @@ -98,7 +98,7 @@ mpltoolbox==25.10.0 # scippneutron networkx==3.5 # via cyclebane -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # h5py @@ -125,7 +125,7 @@ pillow==12.0.0 # via # ipympl # matplotlib -plopp[all]==25.10.0 +plopp[all]==25.11.0 # via # -r base.in # scippneutron @@ -151,7 +151,6 @@ python-dateutil==2.9.0.post0 # via # matplotlib # scippneutron - # scippnexus pythreejs==2.4.2 # via plopp pyyaml==6.0.3 @@ -160,7 +159,7 @@ referencing==0.37.0 # via # jsonschema # jsonschema-specifications -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -180,7 +179,7 @@ scippneutron==25.7.0 # via # -r base.in # essreduce -scippnexus==25.6.0 +scippnexus==25.11.0 # via # -r base.in # essreduce diff --git a/requirements/basetest.txt b/requirements/basetest.txt index 860519a..1c8e9af 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -11,7 +11,7 @@ attrs==25.4.0 # via # jsonschema # referencing -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -37,12 +37,11 @@ lazy-loader==0.4 # tof matplotlib==3.10.7 # via plopp -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # matplotlib # scipp - # scipy # tifffile packaging==25.0 # via @@ -54,12 +53,14 @@ pillow==12.0.0 # via matplotlib platformdirs==4.5.0 # via pooch -plopp==25.10.0 +plopp==25.11.0 # via tof pluggy==1.6.0 # via pytest pooch==1.8.2 - # via -r basetest.in + # via + # -r basetest.in + # tof pydantic==2.12.4 # via scitiff pydantic-core==2.41.5 @@ -68,7 +69,7 @@ pygments==2.19.2 # via pytest pyparsing==3.2.5 # via matplotlib -pytest==8.4.2 +pytest==9.0.1 # via -r basetest.in python-dateutil==2.9.0.post0 # via matplotlib @@ -78,7 +79,7 @@ referencing==0.37.0 # jsonschema-specifications requests==2.32.5 # via pooch -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -86,15 +87,13 @@ scipp==25.11.0 # via # scitiff # tof -scipy==1.16.3 - # via tof scitiff==25.7.0 # via -r basetest.in six==1.17.0 # via python-dateutil tifffile==2025.10.16 # via scitiff -tof==25.10.1 +tof==25.12.0 # via -r basetest.in typing-extensions==4.15.0 # via diff --git a/requirements/ci.txt b/requirements/ci.txt index 5bc02f4..4b49511 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -5,9 +5,9 @@ # # requirements upgrade # -cachetools==6.2.1 +cachetools==6.2.2 # via tox -certifi==2025.10.5 +certifi==2025.11.12 # via requests chardet==5.2.0 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index 3235772..b265a2d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ jupyter-server==2.17.0 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.4.10 +jupyterlab==4.5.0 # via -r dev.in jupyterlab-server==2.28.0 # via jupyterlab @@ -78,7 +78,7 @@ overrides==7.7.0 # via jupyter-server pip-compile-multi==3.2.2 # via -r dev.in -pip-tools==7.5.1 +pip-tools==7.5.2 # via pip-compile-multi plumbum==1.10.0 # via copier diff --git a/requirements/docs.txt b/requirements/docs.txt index 6b7d07f..28b6132 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -22,7 +22,7 @@ 
beautifulsoup4==4.14.2 # pydata-sphinx-theme bleach[css]==6.3.0 # via nbconvert -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -97,10 +97,12 @@ platformdirs==4.5.0 # jupyter-core # pooch pooch==1.8.2 - # via -r docs.in + # via + # -r docs.in + # tof psutil==7.1.3 # via ipykernel -pydantic-settings==2.11.0 +pydantic-settings==2.12.0 # via autodoc-pydantic pydata-sphinx-theme==0.16.1 # via -r docs.in @@ -148,7 +150,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx tinycss2==1.4.0 # via bleach -tof==25.10.1 +tof==25.12.0 # via -r docs.in tornado==6.5.2 # via diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 78f52cf..a0a4769 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -14,11 +14,11 @@ attrs==25.4.0 # via # jsonschema # referencing -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via dask cloudpickle==3.1.2 # via dask @@ -75,7 +75,7 @@ mpltoolbox==25.10.0 # via scippneutron networkx==3.6rc0 # via cyclebane -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # h5py @@ -105,7 +105,9 @@ plopp @ git+https://github.com/scipp/plopp@main pluggy==1.6.0 # via pytest pooch==1.8.2 - # via -r nightly.in + # via + # -r nightly.in + # tof pydantic==2.12.4 # via # scippneutron @@ -116,7 +118,7 @@ pygments==2.19.2 # via pytest pyparsing==3.3.0a1 # via matplotlib -pytest==8.4.2 +pytest==9.0.1 # via -r nightly.in python-dateutil==2.9.0.post0 # via @@ -130,7 +132,7 @@ referencing==0.37.0 # jsonschema-specifications requests==2.32.5 # via pooch -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -159,7 +161,6 @@ scipy==1.16.3 # via # scippneutron # scippnexus - # tof scitiff==25.7.0 # via -r nightly.in six==1.17.0 diff --git a/requirements/static.txt b/requirements/static.txt index 991a892..6e237b2 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -17,7 +17,7 @@ nodeenv==1.9.1 # via pre-commit platformdirs==4.5.0 # via virtualenv -pre-commit==4.3.0 +pre-commit==4.4.0 # via -r static.in pyyaml==6.0.3 # via pre-commit
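
Appendix (illustrative, not part of the patches): numerically, the notebook added in this series reduces to masking, per-run proton-charge scaling, dark-frame subtraction, and division by the open beam. The following is a minimal sketch of that arithmetic with plain scipp operations on synthetic data, useful when experimenting without the TBL data files. It is not the workflow itself: the array shapes, charge values, units, and the y threshold are assumptions, the scalar charges stand in for the summed proton-charge logs, and real runs carry a 'time' dimension as well.

import numpy as np
import scipp as sc

rng = np.random.default_rng(seed=1234)
ny, nx = 16, 16


def fake_run(mean: float) -> sc.DataArray:
    # Stand-in for one detector image; the real workflow loads these
    # from NeXus files.
    return sc.DataArray(
        sc.array(
            dims=['y', 'x'],
            values=rng.poisson(mean, size=(ny, nx)).astype('float64'),
            unit='counts',
        ),
        coords={'y': sc.linspace('y', 0.0, 0.1, ny, unit='m')},
    )


sample = fake_run(50.0)
open_beam = fake_run(80.0)
dark = fake_run(2.0)

# Scalar stand-ins for the summed proton-charge log of each run
# (values and unit are illustrative).
charge_sample = sc.scalar(1.0, unit='C')
charge_open_beam = sc.scalar(1.2, unit='C')
charge_dark = sc.scalar(0.9, unit='C')

# MaskingRules-style entry: coordinate name -> predicate returning a
# boolean mask, here masking pixels above an assumed y threshold, as in
# the notebook's y_pixel_offset example.
rules = {'y': lambda y: y > sc.scalar(0.082, unit='m')}
for name, rule in rules.items():
    sample.masks[name] = rule(sample.coords[name])

# Proton-charge normalization, dark subtraction, open-beam division.
sample_n = sample / charge_sample
open_beam_n = open_beam / charge_open_beam
dark_n = dark / charge_dark
image = (sample_n - dark_n) / (open_beam_n - dark_n)
print(image)

Because scipp propagates masks through arithmetic, applying the rules once up front (the 'Masks applied' stage inspected in the notebook) is enough for masked pixels to remain masked in the final normalized image.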