diff --git a/docs/tbl/index.md b/docs/tbl/index.md index 691a1e0..9e5ac93 100644 --- a/docs/tbl/index.md +++ b/docs/tbl/index.md @@ -6,5 +6,6 @@ maxdepth: 1 --- tbl-data-reduction +orca-image-normalization tbl-make-tof-lookup-table ``` diff --git a/docs/tbl/orca-image-normalization.ipynb b/docs/tbl/orca-image-normalization.ipynb new file mode 100644 index 0000000..9e248c7 --- /dev/null +++ b/docs/tbl/orca-image-normalization.ipynb @@ -0,0 +1,172 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# TBL: Orca image normalization workflow\n", + "\n", + "This notebook shows how to use the workflow to compute normalized images recorded by the Orca detector on the TBL instrument." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "import ess.tbl.data # noqa: F401\n", + "from ess import tbl\n", + "from ess.imaging.types import *\n", + "import scipp as sc\n", + "import plopp as pp\n", + "\n", + "%matplotlib widget" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## Workflow setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "wf = tbl.OrcaNormalizedImagesWorkflow()\n", + "\n", + "wf[Filename[SampleRun]] = tbl.data.tbl_lego_sample_run()\n", + "wf[Filename[DarkBackgroundRun]] = tbl.data.tbl_lego_dark_run()\n", + "wf[Filename[OpenBeamRun]] = tbl.data.tbl_lego_openbeam_run()\n", + "wf[NeXusDetectorName] = 'orca_detector'\n", + "\n", + "wf[MaskingRules] = {} # No masks to begin with\n", + "wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "wf.visualize(NormalizedImage, compact=True, graph_attr={\"rankdir\": \"LR\"})" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "## Run the workflow\n", + "\n", + "We compute the final normalized image:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "image = wf.compute(NormalizedImage)\n", + "image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "pp.slicer(image, autoscale=False)" + ] + }, + { + "cell_type": "markdown", + "id": "8", + "metadata": {}, + "source": [ + "## Adding masks\n", + "\n", + "If we want to mask some part of the image, we update the masking rules.\n", + "For example, here we mask the upper part of the image:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], + "source": [ + "wf[MaskingRules] = {'y_pixel_offset': lambda x: x > sc.scalar(0.082, unit='m')}\n", + "\n", + "pp.slicer(wf.compute(NormalizedImage), autoscale=False)" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "## Intermediate results\n", + "\n", + "We can also inspect intermediate results, which is useful for debugging:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": {}, + "outputs": [], + "source": [ + "results = wf.compute([\n", + " RawDetector[SampleRun],\n", + " CorrectedDetector[SampleRun],\n", + " BackgroundSubtractedDetector[SampleRun]\n", + "])\n", + "\n", + "fig = pp.tiled(2, 2, hspace=0.3, wspace=0.3)\n", + "fig[0, 0] = 
results[RawDetector[SampleRun]]['time', 0].plot(title='Raw data')\n", + "fig[0, 1] = results[CorrectedDetector[SampleRun]]['time', 0].plot(title='Masks applied')\n", + "fig[1, 0] = results[BackgroundSubtractedDetector[SampleRun]]['time', 0].plot(title='Background subtracted')\n", + "fig[1, 1] = image['time', 0].plot(title='Final image')\n", + "fig" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/pyproject.toml b/pyproject.toml index 453643a..8bb1ce2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "scippneutron>=24.12.0", "scippnexus>=23.11.1", "tifffile>=2024.7.2", - "essreduce>=25.11.0", + "essreduce>=25.11.2", "scitiff>=25.7", ] diff --git a/requirements/base.in b/requirements/base.in index ff57099..9e1a2c5 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -10,5 +10,5 @@ scipp>=25.4.0 scippneutron>=24.12.0 scippnexus>=23.11.1 tifffile>=2024.7.2 -essreduce>=25.11.0 +essreduce>=25.11.2 scitiff>=25.7 diff --git a/requirements/base.txt b/requirements/base.txt index 63dd3ee..057b951 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:da99ba40cef426287cd34dd1a2b56aefd0cb24e6 +# SHA1:a1100845ace4b19ad8a759324efbe63c69022f7b # # This file was generated by pip-compile-multi. # To update, run: @@ -7,13 +7,13 @@ # annotated-types==0.7.0 # via pydantic -asttokens==3.0.0 +asttokens==3.0.1 # via stack-data attrs==25.4.0 # via # jsonschema # referencing -click==8.3.0 +click==8.3.1 # via dask cloudpickle==3.1.2 # via dask @@ -25,7 +25,7 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 +dask==2025.11.0 # via -r base.in decorator==5.2.1 # via ipython @@ -33,7 +33,7 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.11.0 +essreduce==25.11.2 # via -r base.in executing==2.2.1 # via stack-data @@ -57,7 +57,7 @@ ipydatawidgets==4.3.5 # via pythreejs ipympl==0.9.8 # via plopp -ipython==9.6.0 +ipython==9.7.0 # via # ipympl # ipywidgets @@ -98,7 +98,7 @@ mpltoolbox==25.10.0 # scippneutron networkx==3.5 # via cyclebane -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # h5py @@ -125,7 +125,7 @@ pillow==12.0.0 # via # ipympl # matplotlib -plopp[all]==25.10.0 +plopp[all]==25.11.0 # via # -r base.in # scippneutron @@ -135,11 +135,11 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pydantic==2.12.3 +pydantic==2.12.4 # via # scippneutron # scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via @@ -151,7 +151,6 @@ python-dateutil==2.9.0.post0 # via # matplotlib # scippneutron - # scippnexus pythreejs==2.4.2 # via plopp pyyaml==6.0.3 @@ -160,7 +159,7 @@ referencing==0.37.0 # via # jsonschema # jsonschema-specifications -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -180,7 +179,7 @@ scippneutron==25.7.0 # via # -r base.in # essreduce -scippnexus==25.6.0 +scippnexus==25.11.0 # via # -r base.in # essreduce diff --git a/requirements/basetest.txt b/requirements/basetest.txt index cee4182..1c8e9af 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -11,7 +11,7 @@ 
attrs==25.4.0 # via # jsonschema # referencing -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -37,12 +37,11 @@ lazy-loader==0.4 # tof matplotlib==3.10.7 # via plopp -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # matplotlib # scipp - # scipy # tifffile packaging==25.0 # via @@ -54,21 +53,23 @@ pillow==12.0.0 # via matplotlib platformdirs==4.5.0 # via pooch -plopp==25.10.0 +plopp==25.11.0 # via tof pluggy==1.6.0 # via pytest pooch==1.8.2 - # via -r basetest.in -pydantic==2.12.3 + # via + # -r basetest.in + # tof +pydantic==2.12.4 # via scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via pytest pyparsing==3.2.5 # via matplotlib -pytest==8.4.2 +pytest==9.0.1 # via -r basetest.in python-dateutil==2.9.0.post0 # via matplotlib @@ -78,7 +79,7 @@ referencing==0.37.0 # jsonschema-specifications requests==2.32.5 # via pooch -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -86,15 +87,13 @@ scipp==25.11.0 # via # scitiff # tof -scipy==1.16.3 - # via tof scitiff==25.7.0 # via -r basetest.in six==1.17.0 # via python-dateutil tifffile==2025.10.16 # via scitiff -tof==25.10.1 +tof==25.12.0 # via -r basetest.in typing-extensions==4.15.0 # via diff --git a/requirements/ci.txt b/requirements/ci.txt index 5bc02f4..4b49511 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -5,9 +5,9 @@ # # requirements upgrade # -cachetools==6.2.1 +cachetools==6.2.2 # via tox -certifi==2025.10.5 +certifi==2025.11.12 # via requests chardet==5.2.0 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index 3235772..b265a2d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,7 +66,7 @@ jupyter-server==2.17.0 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.4.10 +jupyterlab==4.5.0 # via -r dev.in jupyterlab-server==2.28.0 # via jupyterlab @@ -78,7 +78,7 @@ overrides==7.7.0 # via jupyter-server pip-compile-multi==3.2.2 # via -r dev.in -pip-tools==7.5.1 +pip-tools==7.5.2 # via pip-compile-multi plumbum==1.10.0 # via copier diff --git a/requirements/docs.txt b/requirements/docs.txt index 6b7d07f..28b6132 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -22,7 +22,7 @@ beautifulsoup4==4.14.2 # pydata-sphinx-theme bleach[css]==6.3.0 # via nbconvert -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests @@ -97,10 +97,12 @@ platformdirs==4.5.0 # jupyter-core # pooch pooch==1.8.2 - # via -r docs.in + # via + # -r docs.in + # tof psutil==7.1.3 # via ipykernel -pydantic-settings==2.11.0 +pydantic-settings==2.12.0 # via autodoc-pydantic pydata-sphinx-theme==0.16.1 # via -r docs.in @@ -148,7 +150,7 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx tinycss2==1.4.0 # via bleach -tof==25.10.1 +tof==25.12.0 # via -r docs.in tornado==6.5.2 # via diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 93e125c..a0a4769 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -14,11 +14,11 @@ attrs==25.4.0 # via # jsonschema # referencing -certifi==2025.10.5 +certifi==2025.11.12 # via requests charset-normalizer==3.4.4 # via requests -click==8.3.0 +click==8.3.1 # via dask cloudpickle==3.1.2 # via dask @@ -28,7 +28,7 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.10.0 +dask==2025.11.0 # via -r nightly.in dnspython==2.8.0 # via email-validator @@ -73,9 +73,9 @@ matplotlib==3.10.7 # plopp mpltoolbox==25.10.0 # via scippneutron -networkx==3.5 
+networkx==3.6rc0 # via cyclebane -numpy==2.3.4 +numpy==2.3.5 # via # contourpy # h5py @@ -105,18 +105,20 @@ plopp @ git+https://github.com/scipp/plopp@main pluggy==1.6.0 # via pytest pooch==1.8.2 - # via -r nightly.in -pydantic==2.12.3 + # via + # -r nightly.in + # tof +pydantic==2.12.4 # via # scippneutron # scitiff -pydantic-core==2.41.4 +pydantic-core==2.41.5 # via pydantic pygments==2.19.2 # via pytest pyparsing==3.3.0a1 # via matplotlib -pytest==8.4.2 +pytest==9.0.1 # via -r nightly.in python-dateutil==2.9.0.post0 # via @@ -130,7 +132,7 @@ referencing==0.37.0 # jsonschema-specifications requests==2.32.5 # via pooch -rpds-py==0.28.0 +rpds-py==0.29.0 # via # jsonschema # referencing @@ -159,7 +161,6 @@ scipy==1.16.3 # via # scippneutron # scippnexus - # tof scitiff==25.7.0 # via -r nightly.in six==1.17.0 diff --git a/requirements/static.txt b/requirements/static.txt index 991a892..6e237b2 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -17,7 +17,7 @@ nodeenv==1.9.1 # via pre-commit platformdirs==4.5.0 # via virtualenv -pre-commit==4.3.0 +pre-commit==4.4.0 # via -r static.in pyyaml==6.0.3 # via pre-commit diff --git a/src/ess/imaging/__init__.py b/src/ess/imaging/__init__.py index f1c1083..9d49718 100644 --- a/src/ess/imaging/__init__.py +++ b/src/ess/imaging/__init__.py @@ -9,11 +9,13 @@ except importlib.metadata.PackageNotFoundError: __version__ = "0.0.0" -from . import tools +from . import masking, normalization, tools del importlib __all__ = [ "__version__", + "masking", + "normalization", "tools", ] diff --git a/src/ess/imaging/masking.py b/src/ess/imaging/masking.py new file mode 100644 index 0000000..2a6cc7b --- /dev/null +++ b/src/ess/imaging/masking.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers to apply masks to detector data. +""" + +import scipp as sc + +from ess.reduce.nexus.types import RawDetector + +from ..imaging.types import CorrectedDetector, MaskingRules, RunType + + +def apply_masks( + da: RawDetector[RunType], masks: MaskingRules +) -> CorrectedDetector[RunType]: + if not masks: + return CorrectedDetector[RunType](da) + out = da.copy(deep=False) + for coord_name, mask in masks.items(): + if (out.bins is not None) and (coord_name in out.bins.coords): + out.bins.masks[coord_name] = mask(out.bins.coords[coord_name]) + else: + coord = ( + sc.midpoints(out.coords[coord_name]) + if out.coords.is_edges(coord_name, coord_name) + else out.coords[coord_name] + ) + out.masks[coord_name] = mask(coord) + return CorrectedDetector[RunType](out) + + +providers = (apply_masks,) diff --git a/src/ess/imaging/normalization.py b/src/ess/imaging/normalization.py new file mode 100644 index 0000000..7dc953e --- /dev/null +++ b/src/ess/imaging/normalization.py @@ -0,0 +1,89 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers for normalization. 
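+
+These providers act on proton-charge normalized data: they subtract the
+dark-background run from the sample and open-beam runs, and then divide the
+sample by the open beam to obtain the final normalized image.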
+""" + +from ess.reduce.uncertainty import broadcast_uncertainties + +from .types import ( + BackgroundSubtractedDetector, + DarkBackgroundRun, + FluxNormalizedDetector, + NormalizedImage, + OpenBeamRun, + SampleRun, + UncertaintyBroadcastMode, +) + + +def subtract_background_sample( + data: FluxNormalizedDetector[SampleRun], + background: FluxNormalizedDetector[DarkBackgroundRun], + uncertainties: UncertaintyBroadcastMode, +) -> BackgroundSubtractedDetector[SampleRun]: + """ + Subtract background from proton-charge normalized sample data. + + Parameters + ---------- + data: + Sample data to process (normalized to proton charge). + background: + Background (dark frame) data to subtract (normalized to proton charge). + uncertainties: + Mode to use when broadcasting uncertainties from background to data (data + typically has multiple frames, while background usually has only one frame). + """ + return BackgroundSubtractedDetector[SampleRun]( + data - broadcast_uncertainties(background, prototype=data, mode=uncertainties) + ) + + +def subtract_background_openbeam( + data: FluxNormalizedDetector[OpenBeamRun], + background: FluxNormalizedDetector[DarkBackgroundRun], +) -> BackgroundSubtractedDetector[OpenBeamRun]: + """ + Subtract background from proton-charge normalized open beam data. + + Parameters + ---------- + data: + Open beam data to process (normalized to proton charge). + background: + Background (dark frame) data to subtract (normalized to proton charge). + """ + return BackgroundSubtractedDetector[OpenBeamRun](data - background) + + +def sample_over_openbeam( + sample: BackgroundSubtractedDetector[SampleRun], + open_beam: BackgroundSubtractedDetector[OpenBeamRun], + uncertainties: UncertaintyBroadcastMode, +) -> NormalizedImage: + """ + Divide background-subtracted sample data by background-subtracted open beam data, + to obtain final normalized image. + + Parameters + ---------- + sample: + Sample data to process (background subtracted). + open_beam: + Open beam data to divide by (background subtracted). + uncertainties: + Mode to use when broadcasting uncertainties from open beam to sample (sample + typically has multiple frames, while open beam usually has only one frame). 
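+
+    Notes
+    -----
+    Together with the background-subtraction providers, the resulting image is
+    ``(sample - dark) / (open_beam - dark)``, where each term has first been
+    normalized to the proton charge.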
+ """ + return NormalizedImage( + sample + / broadcast_uncertainties(open_beam, prototype=sample, mode=uncertainties) + ) + + +providers = ( + subtract_background_sample, + subtract_background_openbeam, + sample_over_openbeam, +) diff --git a/src/ess/imaging/types.py b/src/ess/imaging/types.py index 1f5c866..bb95e37 100644 --- a/src/ess/imaging/types.py +++ b/src/ess/imaging/types.py @@ -9,6 +9,7 @@ from ess.reduce.nexus import types as reduce_t from ess.reduce.time_of_flight import types as tof_t +from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode # 1 TypeVars used to parametrize the generic parts of the workflow @@ -27,6 +28,8 @@ TimeOfFlightLookupTable = tof_t.TimeOfFlightLookupTable TimeOfFlightLookupTableFilename = tof_t.TimeOfFlightLookupTableFilename +UncertaintyBroadcastMode = _UncertaintyBroadcastMode + SampleRun = NewType('SampleRun', int) """Sample run; a run with a sample in the beam.""" @@ -71,7 +74,28 @@ class WavelengthDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): class CorrectedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): - """Detector data with masks.""" + """Corrected detector counts with masking applied.""" + + +class FluxNormalizedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector counts normalized to proton charge.""" + + +class BackgroundSubtractedDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector counts with dark background subtracted.""" + + +NormalizedImage = NewType('NormalizedImage', sc.DataArray) +"""Final image: background-subtracted sample run divided by background-subtracted open +beam run.""" + + +class ProtonCharge(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Proton charge data for a run.""" + + +class ExposureTime(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Exposure time of each frame recorded by the camera detector.""" del sc, sciline, NewType diff --git a/src/ess/tbl/__init__.py b/src/ess/tbl/__init__.py index c3e8a29..508c027 100644 --- a/src/ess/tbl/__init__.py +++ b/src/ess/tbl/__init__.py @@ -4,6 +4,8 @@ import importlib.metadata +from . import orca +from .orca import OrcaNormalizedImagesWorkflow from .workflow import TblWorkflow, default_parameters try: @@ -14,6 +16,8 @@ del importlib __all__ = [ + "OrcaNormalizedImagesWorkflow", "TblWorkflow", "default_parameters", + "orca", ] diff --git a/src/ess/tbl/data.py b/src/ess/tbl/data.py index b9a2c6d..d01c58d 100644 --- a/src/ess/tbl/data.py +++ b/src/ess/tbl/data.py @@ -8,13 +8,16 @@ _registry = make_registry( 'ess/tbl', - version="1", + version="2", files={ "tbl_sample_data_2025-03.hdf": "md5:12db6bc06721278b3abe47992eac3e77", "TBL-tof-lookup-table-no-choppers.h5": "md5:8bc98fac0ee64fc8f5decf509c75bafe", 'tbl-orca-focussing.hdf.zip': Entry( alg='md5', chk='f365acd9ea45dd205c0b9398d163cfa4', unzip=True ), + "ymir_lego_dark_run.hdf": "md5:c0ed089dd7663986042e29fb47514130", + "ymir_lego_openbeam_run.hdf": "md5:00375becd54d2ed3be096413dc30883c", + "ymir_lego_sample_run.hdf": "md5:ae56a335cf3d4e87ef090ec4e51da69c", }, ) @@ -44,3 +47,36 @@ def tbl_orca_focussing_data() -> pathlib.Path: """ return _registry.get_path('tbl-orca-focussing.hdf.zip') + + +def tbl_lego_dark_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO dark run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. 
+ The dark run data was extracted from the YMIR LEGO run (first 5 frames). + """ + + return _registry.get_path("ymir_lego_dark_run.hdf") + + +def tbl_lego_openbeam_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO open beam run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. + The open beam run data was extracted from the YMIR LEGO run (frames 5 to 10). + """ + + return _registry.get_path("ymir_lego_openbeam_run.hdf") + + +def tbl_lego_sample_run() -> pathlib.Path: + """ + Return the path to the TBL LEGO sample run HDF5 file, created from the YMIR data. + This file was created using the tools/make-tbl-images-from-ymir.ipynb notebook. + A TBL file (857127_00000212.hdf) was used as a template for the NeXus structure. + The sample run data was extracted from the YMIR LEGO run (frames 10 and onward). + """ + + return _registry.get_path("ymir_lego_sample_run.hdf") diff --git a/src/ess/tbl/orca.py b/src/ess/tbl/orca.py new file mode 100644 index 0000000..30a5f52 --- /dev/null +++ b/src/ess/tbl/orca.py @@ -0,0 +1,178 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +""" +Contains the providers for the orca workflow. +""" + +import sciline as sl +import scipp as sc + +from ess.reduce.nexus import GenericNeXusWorkflow, load_from_path +from ess.reduce.nexus.types import ( + NeXusDetectorName, + NeXusFileSpec, + NeXusLocationSpec, + NeXusName, +) + +from .. import imaging +from ..imaging.types import ( + CorrectedDetector, + DarkBackgroundRun, + ExposureTime, + FluxNormalizedDetector, + OpenBeamRun, + ProtonCharge, + RunType, + SampleRun, +) + + +def load_proton_charge( + file: NeXusFileSpec[RunType], path: NeXusName[ProtonCharge[RunType]] +) -> ProtonCharge[RunType]: + # Note that putting '/value' at the end of the 'path' in the default_parameters + # yields different results as it can return a Variable instead of a DataArray, + # depending on the contents of the NeXus file. + return ProtonCharge[RunType]( + load_from_path(NeXusLocationSpec(filename=file.value, component_name=path))[ + "value" + ] + ) + + +def load_exposure_time( + file: NeXusFileSpec[RunType], path: NeXusName[ExposureTime[RunType]] +) -> ExposureTime[RunType]: + # Note that putting '/value' at the end of the 'path' in the default_parameters + # yields different results as it can return a Variable instead of a DataArray, + # depending on the contents of the NeXus file. + return ExposureTime[RunType]( + load_from_path(NeXusLocationSpec(filename=file.value, component_name=path))[ + "value" + ].squeeze() + ) + + +def _compute_proton_charge_per_exposure( + data: sc.DataArray, proton_charge: sc.DataArray, exposure_time: sc.DataArray +) -> sc.DataArray: + # A note on timings: + # We want to sum the proton charge inside each time bin (defined by the duration of + # each frame). However, the time dimension of the data recorded at the detector is + # not the same time as the proton charge (which is when the protons hit the + # target). We need to shift the proton charge time to account for the time it takes + # for neutrons to travel from the target to the detector. Does this mean we cannot + # do the normalization without computing time of flight? 
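+    #
+    # Schematic example: with frame time stamps t = [0, 2, 4] s and an exposure time
+    # of 0.5 s, the interleaved bin edges constructed below are
+    # [0, 0.5, 2, 2.5, 4, 4.5] s; keeping every second histogram bin then selects the
+    # charge accumulated during [0, 0.5], [2, 2.5] and [4, 4.5].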
+
+    t = data.coords['time']
+    exp = exposure_time.data.to(unit=t.unit)
+    # The following assumes that the difference between successive frames (time stamps)
+    # is larger than the exposure time. We need to check that this is indeed the case.
+    if (t[1:] - t[:-1]).min() < exp:
+        raise ValueError(
+            "normalize_by_proton_charge_orca: Exposure time is larger than the "
+            "smallest time between successive frames."
+        )
+
+    bins = sc.sort(sc.concat([t, t + exp], dim=t.dim), t.dim)
+    # We select every second bin, as the odd bins lie between the end of the exposure
+    # of one frame and the start of the next frame.
+    return proton_charge.hist(time=bins).data[::2]
+
+
+def normalize_by_proton_charge_orca(
+    data: CorrectedDetector[RunType],
+    proton_charge: ProtonCharge[RunType],
+    exposure_time: ExposureTime[RunType],
+) -> FluxNormalizedDetector[RunType]:
+    """
+    Normalize detector data by the proton charge (dark and open beam runs).
+    We find the time stamps for the data, which mark the start of an exposure.
+    We sum the proton charge within the interval from each timestamp to
+    timestamp + exposure_time.
+    We then sum the data along the time dimension, and divide by the sum of the proton
+    charge accumulated during each exposure.
+
+    Parameters
+    ----------
+    data:
+        Corrected detector data to be normalized.
+    proton_charge:
+        Proton charge data for normalization.
+    exposure_time:
+        Exposure time for each image in the data.
+    """
+
+    charge_per_frame = _compute_proton_charge_per_exposure(
+        data, proton_charge, exposure_time
+    )
+
+    return FluxNormalizedDetector[RunType](
+        data.sum('time') / charge_per_frame.sum('time')
+    )
+
+
+def normalize_by_proton_charge_orca_sample(
+    data: CorrectedDetector[SampleRun],
+    proton_charge: ProtonCharge[SampleRun],
+    exposure_time: ExposureTime[SampleRun],
+) -> FluxNormalizedDetector[SampleRun]:
+    """
+    Normalize sample run detector data by the proton charge.
+    The handling of the SampleRun is different from the other runs:
+    the sample may have a time dimension representing multiple frames.
+    In this case, we need to sum the proton charge for each frame separately.
+
+    Parameters
+    ----------
+    data:
+        Corrected detector data to be normalized.
+    proton_charge:
+        Proton charge data for normalization.
+    exposure_time:
+        Exposure time for each image in the data.
+    """
+
+    charge_per_frame = _compute_proton_charge_per_exposure(
+        data, proton_charge, exposure_time
+    )
+
+    # Here we preserve the time dimension of the sample data and the proton charge.
+    return FluxNormalizedDetector[SampleRun](data / charge_per_frame)
+
+
+providers = (
+    load_exposure_time,
+    load_proton_charge,
+    normalize_by_proton_charge_orca,
+    normalize_by_proton_charge_orca_sample,
+)
+
+
+def default_parameters() -> dict:
+    return {
+        NeXusDetectorName: 'orca_detector',
+        NeXusName[ProtonCharge]: '/entry/neutron_prod_info/pulse_charge',
+        NeXusName[ExposureTime]: '/entry/instrument/orca_detector/camera_exposure',
+    }
+
+
+def OrcaNormalizedImagesWorkflow(**kwargs) -> sl.Pipeline:
+    """
+    Workflow with default parameters for TBL.
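+
+    Builds a GenericNeXusWorkflow for the sample, open-beam and dark-background
+    runs, inserts the imaging masking and normalization providers together with
+    the Orca proton-charge normalization, and pre-sets the detector name and the
+    NeXus paths for the proton charge and exposure time. Keyword arguments are
+    forwarded to GenericNeXusWorkflow.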
+ """ + + wf = GenericNeXusWorkflow( + run_types=[SampleRun, OpenBeamRun, DarkBackgroundRun], + monitor_types=[], + **kwargs, + ) + + for provider in ( + *imaging.normalization.providers, + *imaging.masking.providers, + *providers, + ): + wf.insert(provider) + for key, param in default_parameters().items(): + wf[key] = param + return wf diff --git a/tests/tbl/orca_test.py b/tests/tbl/orca_test.py new file mode 100644 index 0000000..425eb00 --- /dev/null +++ b/tests/tbl/orca_test.py @@ -0,0 +1,110 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) + +import pytest +import sciline as sl +import scipp as sc +import scippnexus as sx +from scipp.testing import assert_identical + +import ess.tbl.data # noqa: F401 +from ess import tbl +from ess.imaging.types import ( + BackgroundSubtractedDetector, + CorrectedDetector, + DarkBackgroundRun, + Filename, + FluxNormalizedDetector, + MaskingRules, + NeXusDetectorName, + NormalizedImage, + OpenBeamRun, + Position, + ProtonCharge, + RawDetector, + SampleRun, + UncertaintyBroadcastMode, +) +from ess.tbl import orca + + +@pytest.fixture +def workflow() -> sl.Pipeline: + """ + Workflow for normalizing TBL Orca images. + """ + + wf = orca.OrcaNormalizedImagesWorkflow() + wf[Filename[SampleRun]] = tbl.data.tbl_lego_sample_run() + wf[Filename[DarkBackgroundRun]] = tbl.data.tbl_lego_dark_run() + wf[Filename[OpenBeamRun]] = tbl.data.tbl_lego_openbeam_run() + wf[MaskingRules] = {} + wf[NeXusDetectorName] = 'orca_detector' + wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound + wf[Position[sx.NXsample, SampleRun]] = sc.vector([0.0, 0.0, 0.0], unit='m') + return wf + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_loads_raw_data(workflow, run): + da = workflow.compute(RawDetector[run]) + assert "position" in da.coords + assert "time" in da.coords + assert da.ndim == 3 + assert "time" in da.dims + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_loads_proton_charge(workflow, run): + pc = workflow.compute(ProtonCharge[run]) + assert "time" in pc.coords + assert "time" in pc.dims + assert pc.unit == "uC" + + +@pytest.mark.parametrize("run", [SampleRun, OpenBeamRun, DarkBackgroundRun]) +def test_workflow_applies_masks(workflow, run): + workflow[MaskingRules] = { + 'y_pixel_offset': lambda x: x > sc.scalar(0.082, unit='m') + } + da = workflow.compute(RawDetector[run]) + masked_da = workflow.compute(CorrectedDetector[run]) + assert 'y_pixel_offset' in masked_da.masks + assert da.sum().value > masked_da.sum().value + + +@pytest.mark.parametrize("run", [OpenBeamRun, DarkBackgroundRun]) +def test_workflow_normalizes_by_proton_charge(workflow, run): + da = workflow.compute(FluxNormalizedDetector[run]) + # Dark and open beam runs have been averaged over time dimension + assert da.ndim == 2 + assert "time" not in da.dims + # TODO: should it be "counts / uC"? + assert da.unit == "1 / uC" + + +def test_workflow_normalizes_sample_by_proton_charge(workflow): + da = workflow.compute(FluxNormalizedDetector[SampleRun]) + # Sample run retains time dimension + assert da.ndim == 3 + assert "time" in da.dims + # TODO: should it be "counts / uC"? 
+ assert da.unit == "1 / uC" + + +@pytest.mark.parametrize("run", [OpenBeamRun, SampleRun]) +def test_workflow_subtracts_dark_background(workflow, run): + background = workflow.compute(FluxNormalizedDetector[DarkBackgroundRun]) + before = workflow.compute(FluxNormalizedDetector[run]) + after = workflow.compute(BackgroundSubtractedDetector[run]) + + assert_identical(sc.values(after), sc.values(before) - sc.values(background)) + + +def test_workflow_computes_normalized_image(workflow): + sample = workflow.compute(BackgroundSubtractedDetector[SampleRun]) + open_beam = workflow.compute(BackgroundSubtractedDetector[OpenBeamRun]) + normalized = workflow.compute(NormalizedImage) + + assert_identical(sc.values(normalized), sc.values(sample) / sc.values(open_beam)) + assert normalized.unit == sc.units.dimensionless diff --git a/tools/make-tbl-images-from-ymir.ipynb b/tools/make-tbl-images-from-ymir.ipynb new file mode 100644 index 0000000..d30aeb4 --- /dev/null +++ b/tools/make-tbl-images-from-ymir.ipynb @@ -0,0 +1,259 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Make TBL dark, open beam and sample images from Ymir data\n", + "\n", + "This notebook creates files for TBL that contain images in the Orca detector file entry.\n", + "We used a TBL file written by ECDC as a template (`857127_00000212.hdf`) and replaced the orca detector data with images recorded at Ymir.\n", + "\n", + "The Ymir data was a single file containing dark frame, open beam, and sample data.\n", + "We split this into 3 separate datasets and files, as this is the way TBL would like to operate (at least initially).\n", + "\n", + "We then add an arbitrary exposure time entry in the file for the images.\n", + "Finally, we generate a fake proton-charge log and place it inside the `neutron_prod_info` group.\n", + "The proton charge is a time series from the first frame time to the last frame time + exposure time, where the value is randomly oscillating around 1." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import h5py as h5\n", + "import numpy as np\n", + "import scipp as sc" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ess.ymir.data import ymir_lego_images_path" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Load Ymir data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "path = 'entry/instrument/histogram_mode_detectors/orca/data/'\n", + "\n", + "with h5.File(ymir_lego_images_path()) as f:\n", + " lego_data = f[path+'value'][()]\n", + " lego_time = f[path+'time'][()]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Split into dark, open beam, and sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "dark_data = lego_data[:5, ...]\n", + "dark_time = lego_time[:5, ...]\n", + "\n", + "ob_data = lego_data[5:10, ...]\n", + "ob_time = lego_time[5:10, ...]\n", + "\n", + "sample_data = lego_data[10:, ...]\n", + "sample_time = lego_time[10:, ...]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create fake proton charge" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exposure_time = int(5e8) # 0.5s (made up)\n", + "period = 1e9/14\n", + "\n", + "dark_charge_time = np.arange(float(dark_time[0]), float(dark_time[-1] + exposure_time), period).astype('uint64')\n", + "dark_charge_value = np.random.uniform(0.99, 1.01, size=len(dark_charge_time)).astype('float64')\n", + "\n", + "ob_charge_time = np.arange(float(ob_time[0]), float(ob_time[-1] + exposure_time), period).astype('uint64')\n", + "ob_charge_value = np.random.uniform(0.99, 1.01, size=len(ob_charge_time)).astype('float64')\n", + "\n", + "sample_charge_time = np.arange(float(sample_time[0]), float(sample_time[-1] + exposure_time), period).astype('uint64')\n", + "sample_charge_value = np.random.uniform(0.99, 1.01, size=len(sample_charge_time)).astype('float64')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Make pixel offsets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "npix = dark_data.shape[-1]\n", + "\n", + "x_pixel_offset = sc.linspace('dim_1', -0.1, 0.1, npix, unit='m')\n", + "y_pixel_offset = sc.linspace('dim_2', -0.1, 0.1, npix, unit='m')\n", + "x_pixel_offset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create new files from template" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def replace_dataset(entry, name, values):\n", + " attrs = dict(entry[name].attrs)\n", + " del entry[name]\n", + " dset = entry.create_dataset(name, data=values)\n", + " dset.attrs.update(attrs)\n", + "\n", + "def make_new_file(template_nexus_file, outfile, data, proton_charge):\n", + " import shutil\n", + " shutil.copyfile(template_nexus_file, outfile)\n", + "\n", + " # detector_path = 'entry/instrument/histogram_mode_detectors/orca' # ODIN\n", + " detector_path = 'entry/instrument/orca_detector/' # TBL\n", + " proton_charge_path = 'entry/neutron_prod_info/pulse_charge'\n", + "\n", + " with h5.File(outfile, \"r+\") as f:\n", + "\n", + " detector_data = f[detector_path+\"data\"]\n", + " 
replace_dataset(detector_data, name=\"value\", values=data[\"value\"])\n", + " replace_dataset(detector_data, name=\"time\", values=data[\"time\"])\n", + "\n", + " detector = f[detector_path]\n", + " detector.copy('intensifier', detector, 'camera_exposure')\n", + " exp = f[detector_path+\"camera_exposure\"]\n", + " replace_dataset(exp, name=\"value\", values=np.array([exposure_time]))\n", + " replace_dataset(exp, name=\"average_value\", values=exposure_time)\n", + " replace_dataset(exp, name=\"minimum_value\", values=exposure_time)\n", + " replace_dataset(exp, name=\"maximum_value\", values=exposure_time)\n", + " f[detector_path+\"camera_exposure/value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/average_value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/minimum_value\"].attrs['units'] = 'ns'\n", + " f[detector_path+\"camera_exposure/maximum_value\"].attrs['units'] = 'ns'\n", + "\n", + " xoff = detector.create_dataset('x_pixel_offset', data=x_pixel_offset.values)\n", + " xoff.attrs['units'] = str(x_pixel_offset.unit)\n", + " xoff.attrs['axis'] = 1\n", + " yoff = detector.create_dataset('y_pixel_offset', data=y_pixel_offset.values)\n", + " yoff.attrs['units'] = str(y_pixel_offset.unit)\n", + " yoff.attrs['axis'] = 2\n", + " detector.attrs['axes'] = ['dim_2', 'dim_1']\n", + "\n", + " pcharge = f[proton_charge_path]\n", + " replace_dataset(pcharge, name=\"value\", values=proton_charge[\"value\"])\n", + " replace_dataset(pcharge, name=\"time\", values=proton_charge[\"time\"])\n", + " replace_dataset(pcharge, name=\"average_value\", values=proton_charge[\"value\"].mean())\n", + " replace_dataset(pcharge, name=\"minimum_value\", values=proton_charge[\"value\"].min())\n", + " replace_dataset(pcharge, name=\"maximum_value\", values=proton_charge[\"value\"].max())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "template_nexus_file = \"857127_00000212.hdf\"\n", + "\n", + "# Dark run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_dark_run.hdf\",\n", + " data={\"value\": dark_data, \"time\": dark_time},\n", + " proton_charge={\"value\": dark_charge_value, \"time\": dark_charge_time}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Openbeam run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_openbeam_run.hdf\",\n", + " data={\"value\": ob_data, \"time\": ob_time},\n", + " proton_charge={\"value\": ob_charge_value, \"time\": ob_charge_time}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sample run\n", + "make_new_file(\n", + " template_nexus_file=template_nexus_file,\n", + " outfile=\"ymir_lego_sample_run.hdf\",\n", + " data={\"value\": sample_data, \"time\": sample_time},\n", + " proton_charge={\"value\": sample_charge_value, \"time\": sample_charge_time}\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}
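
As a minimal, self-contained sketch of what a single `MaskingRules` entry does when `apply_masks` evaluates it (toy data with made-up coordinate values and threshold; only scipp is required):

```python
import scipp as sc

# Toy stand-in for one Orca frame: a 4x4 image with a y_pixel_offset coordinate.
image = sc.DataArray(
    data=sc.ones(dims=['dim_1', 'dim_2'], shape=[4, 4], unit='counts'),
    coords={'y_pixel_offset': sc.linspace('dim_1', -0.1, 0.1, 4, unit='m')},
)


# A masking rule maps a coordinate name to a callable that returns True where
# pixels should be masked; apply_masks evaluates each rule on the matching
# coordinate and stores the result as a mask of the same name.
def rule(y: sc.Variable) -> sc.Variable:
    return y > sc.scalar(0.05, unit='m')


image.masks['y_pixel_offset'] = rule(image.coords['y_pixel_offset'])
print(image.masks['y_pixel_offset'].values)  # [False False False  True]
```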