diff --git a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb index 5b19a7e4..0b3542dc 100644 --- a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb +++ b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb @@ -239,7 +239,7 @@ "source": [ "results = workflow.compute(\n", " (\n", - " TofData[SampleRun],\n", + " DetectorTofData[SampleRun],\n", " MaskedData[SampleRun],\n", " FilteredData[SampleRun],\n", " FilteredData[VanadiumRun],\n", @@ -254,7 +254,7 @@ "metadata": {}, "outputs": [], "source": [ - "results[TofData[SampleRun]]" + "results[DetectorTofData[SampleRun]]" ] }, { diff --git a/pyproject.toml b/pyproject.toml index e0378455..bcd0510e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ requires-python = ">=3.10" # Make sure to list one dependency per line. dependencies = [ "dask", - "essreduce>=25.02.4", + "essreduce>=25.05.1", "graphviz", "numpy", "plopp>=25.03.0", diff --git a/requirements/base.in b/requirements/base.in index 79492d7a..b10ff657 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -3,7 +3,7 @@ # --- END OF CUSTOM SECTION --- # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY! dask -essreduce>=25.02.4 +essreduce>=25.05.1 graphviz numpy plopp>=25.03.0 diff --git a/requirements/base.txt b/requirements/base.txt index a08013e8..5c50f61f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:4c637dd9c91e42089ff8f729de2279bb8611bf83 +# SHA1:9fd92aa14a70d874a87640e7dbb332600b0c2ac7 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -9,19 +9,19 @@ annotated-types==0.7.0 # via pydantic asttokens==3.0.0 # via stack-data -click==8.1.8 +click==8.2.0 # via dask cloudpickle==3.1.1 # via dask comm==0.2.2 # via ipywidgets -contourpy==1.3.1 +contourpy==1.3.2 # via matplotlib cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.3.0 +dask==2025.5.0 # via -r base.in decorator==5.2.1 # via ipython @@ -29,13 +29,13 @@ dnspython==2.7.0 # via email-validator email-validator==2.2.0 # via scippneutron -essreduce==25.4.1 +essreduce==25.5.1 # via -r base.in -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via ipython executing==2.2.0 # via stack-data -fonttools==4.57.0 +fonttools==4.58.0 # via matplotlib fsspec==2025.3.2 # via dask @@ -47,21 +47,19 @@ h5py==3.13.0 # scippnexus idna==3.10 # via email-validator -importlib-metadata==8.6.1 +importlib-metadata==8.7.0 # via dask -importlib-resources==6.5.2 - # via tof ipydatawidgets==4.3.5 # via pythreejs -ipython==8.35.0 +ipython==8.36.0 # via ipywidgets -ipywidgets==8.1.5 +ipywidgets==8.1.7 # via # ipydatawidgets # pythreejs jedi==0.19.2 # via ipython -jupyterlab-widgets==3.0.13 +jupyterlab-widgets==3.0.15 # via ipywidgets kiwisolver==1.4.8 # via matplotlib @@ -69,31 +67,31 @@ lazy-loader==0.4 # via # plopp # scippneutron + # tof locket==1.0.0 # via partd -matplotlib==3.10.1 +matplotlib==3.10.3 # via # mpltoolbox # plopp matplotlib-inline==0.1.7 # via ipython -mpltoolbox==24.5.1 +mpltoolbox==25.4.0 # via scippneutron networkx==3.4.2 # via cyclebane -numpy==2.2.4 +numpy==2.2.5 # via # -r base.in # contourpy # h5py # ipydatawidgets # matplotlib - # mpltoolbox # pythreejs # scipp # scippneutron # scipy -packaging==24.2 +packaging==25.0 # via # dask # lazy-loader @@ -104,22 +102,22 @@ partd==1.4.2 # via dask pexpect==4.9.0 # via ipython -pillow==11.1.0 +pillow==11.2.1 # via matplotlib -plopp==25.3.0 +plopp==25.4.1 # via # -r base.in # 
scippneutron # tof -prompt-toolkit==3.0.50 +prompt-toolkit==3.0.51 # via ipython ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pydantic==2.11.3 +pydantic==2.11.4 # via scippneutron -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via ipython @@ -134,7 +132,7 @@ pythreejs==2.4.2 # via -r base.in pyyaml==6.0.2 # via dask -sciline==25.4.1 +sciline==25.5.0 # via # -r base.in # essreduce @@ -145,7 +143,7 @@ scipp==25.5.0 # scippneutron # scippnexus # tof -scippneutron==25.2.1 +scippneutron==25.5.0 # via # -r base.in # essreduce @@ -154,7 +152,7 @@ scippnexus==25.4.0 # -r base.in # essreduce # scippneutron -scipy==1.15.2 +scipy==1.15.3 # via # scippneutron # scippnexus @@ -163,7 +161,7 @@ six==1.17.0 # via python-dateutil stack-data==0.6.3 # via ipython -tof==25.4.0 +tof==25.5.0 # via -r base.in toolz==1.0.0 # via @@ -179,8 +177,9 @@ traitlets==5.14.3 # traittypes traittypes==0.2.1 # via ipydatawidgets -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via + # exceptiongroup # ipython # pydantic # pydantic-core @@ -190,7 +189,7 @@ typing-inspection==0.4.0 # via pydantic wcwidth==0.2.13 # via prompt-toolkit -widgetsnbextension==4.0.13 +widgetsnbextension==4.0.14 # via ipywidgets zipp==3.21.0 # via importlib-metadata diff --git a/requirements/basetest.txt b/requirements/basetest.txt index 6fbad0bc..29f84169 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -5,25 +5,25 @@ # # pip-compile-multi # -certifi==2025.1.31 +certifi==2025.4.26 # via requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest idna==3.10 # via requests iniconfig==2.1.0 # via pytest -numpy==2.2.4 +numpy==2.2.5 # via pandas -packaging==24.2 +packaging==25.0 # via # pooch # pytest pandas==2.2.3 # via -r basetest.in -platformdirs==4.3.7 +platformdirs==4.3.8 # via pooch pluggy==1.5.0 # via pytest @@ -41,7 +41,9 @@ six==1.17.0 # via python-dateutil tomli==2.2.1 # via pytest +typing-extensions==4.13.2 + # via exceptiongroup tzdata==2025.2 # via pandas -urllib3==2.3.0 +urllib3==2.4.0 # via requests diff --git a/requirements/ci.txt b/requirements/ci.txt index 176b5124..6c606423 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -7,11 +7,11 @@ # cachetools==5.5.2 # via tox -certifi==2025.1.31 +certifi==2025.4.26 # via requests chardet==5.2.0 # via tox -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests colorama==0.4.6 # via tox @@ -27,18 +27,18 @@ gitpython==3.1.44 # via -r ci.in idna==3.10 # via requests -packaging==24.2 +packaging==25.0 # via # -r ci.in # pyproject-api # tox -platformdirs==4.3.7 +platformdirs==4.3.8 # via # tox # virtualenv pluggy==1.5.0 # via tox -pyproject-api==1.9.0 +pyproject-api==1.9.1 # via tox requests==2.32.3 # via -r ci.in @@ -48,11 +48,11 @@ tomli==2.2.1 # via # pyproject-api # tox -tox==4.25.0 +tox==4.26.0 # via -r ci.in -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via tox -urllib3==2.3.0 +urllib3==2.4.0 # via requests -virtualenv==20.30.0 +virtualenv==20.31.2 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index 091efdaf..2dacc556 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,17 +26,17 @@ async-lru==2.0.5 # via jupyterlab cffi==1.17.1 # via argon2-cffi-bindings -copier==9.6.0 +copier==9.7.1 # via -r dev.in -dunamai==1.23.1 +dunamai==1.24.1 # via copier fqdn==1.5.1 # via jsonschema funcy==2.0 # via copier -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.7 
+httpcore==1.0.9 # via httpx httpx==0.28.1 # via jupyterlab @@ -57,7 +57,7 @@ jupyter-events==0.12.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.15.0 +jupyter-server==2.16.0 # via # jupyter-lsp # jupyterlab @@ -65,7 +65,7 @@ jupyter-server==2.15.0 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.4.0 +jupyterlab==4.4.2 # via -r dev.in jupyterlab-server==2.27.3 # via jupyterlab @@ -75,7 +75,7 @@ overrides==7.7.0 # via jupyter-server pathspec==0.12.1 # via copier -pip-compile-multi==2.8.0 +pip-compile-multi==3.0.0 # via -r dev.in pip-tools==7.4.1 # via pip-compile-multi diff --git a/requirements/docs.txt b/requirements/docs.txt index d1ca2dbd..507fad62 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -18,17 +18,17 @@ babel==2.17.0 # via # pydata-sphinx-theme # sphinx -beautifulsoup4==4.13.3 +beautifulsoup4==4.13.4 # via # nbconvert # pydata-sphinx-theme bleach[css]==6.2.0 # via nbconvert -certifi==2025.1.31 +certifi==2025.4.26 # via requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests -debugpy==1.8.13 +debugpy==1.8.14 # via ipykernel defusedxml==0.7.1 # via nbconvert @@ -56,7 +56,7 @@ jinja2==3.1.6 # sphinx jsonschema==4.23.0 # via nbformat -jsonschema-specifications==2024.10.1 +jsonschema-specifications==2025.4.1 # via jsonschema jupyter-client==8.6.3 # via @@ -106,7 +106,7 @@ pandas==2.2.3 # via -r docs.in pandocfilters==1.5.1 # via nbconvert -platformdirs==4.3.7 +platformdirs==4.3.8 # via # jupyter-core # pooch @@ -114,7 +114,7 @@ pooch==1.8.2 # via -r docs.in psutil==7.0.0 # via ipykernel -pyarrow==19.0.1 +pyarrow==20.0.0 # via -r docs.in pybtex==0.24.0 # via @@ -142,9 +142,9 @@ rpds-py==0.24.0 # via # jsonschema # referencing -snowballstemmer==2.2.0 +snowballstemmer==3.0.1 # via sphinx -soupsieve==2.6 +soupsieve==2.7 # via beautifulsoup4 sphinx==8.1.3 # via @@ -186,7 +186,7 @@ tornado==6.4.2 # jupyter-client tzdata==2025.2 # via pandas -urllib3==2.3.0 +urllib3==2.4.0 # via requests webencodings==0.5.1 # via diff --git a/requirements/mypy.txt b/requirements/mypy.txt index 61d88db1..77e99ca3 100644 --- a/requirements/mypy.txt +++ b/requirements/mypy.txt @@ -8,5 +8,5 @@ -r test.txt mypy==1.15.0 # via -r mypy.in -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy diff --git a/requirements/nightly.txt b/requirements/nightly.txt index f57f2471..e78e93d1 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -12,23 +12,23 @@ annotated-types==0.7.0 # via pydantic asttokens==3.0.0 # via stack-data -certifi==2025.1.31 +certifi==2025.4.26 # via requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.0 # via dask cloudpickle==3.1.1 # via dask comm==0.2.2 # via ipywidgets -contourpy==1.3.1 +contourpy==1.3.2 # via matplotlib cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.3.0 +dask==2025.5.0 # via -r nightly.in decorator==5.2.1 # via ipython @@ -38,13 +38,13 @@ email-validator==2.2.0 # via scippneutron essreduce @ git+https://github.com/scipp/essreduce@main # via -r nightly.in -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via # ipython # pytest executing==2.2.0 # via stack-data -fonttools==4.57.0 +fonttools==4.58.0 # via matplotlib fsspec==2025.3.2 # via dask @@ -58,23 +58,21 @@ idna==3.10 # via # email-validator # requests -importlib-metadata==8.6.1 +importlib-metadata==8.7.0 # via dask -importlib-resources==6.5.2 - # via tof iniconfig==2.1.0 # via pytest ipydatawidgets==4.3.5 # via pythreejs 
-ipython==8.35.0 +ipython==8.36.0 # via ipywidgets -ipywidgets==8.1.5 +ipywidgets==8.1.7 # via # ipydatawidgets # pythreejs jedi==0.19.2 # via ipython -jupyterlab-widgets==3.0.13 +jupyterlab-widgets==3.0.15 # via ipywidgets kiwisolver==1.4.8 # via matplotlib @@ -82,32 +80,32 @@ lazy-loader==0.4 # via # plopp # scippneutron + # tof locket==1.0.0 # via partd -matplotlib==3.10.1 +matplotlib==3.10.3 # via # mpltoolbox # plopp matplotlib-inline==0.1.7 # via ipython -mpltoolbox==24.5.1 +mpltoolbox==25.4.0 # via scippneutron networkx==3.4.2 # via cyclebane -numpy==2.2.4 +numpy==2.2.5 # via # -r nightly.in # contourpy # h5py # ipydatawidgets # matplotlib - # mpltoolbox # pandas # pythreejs # scipp # scippneutron # scipy -packaging==24.2 +packaging==25.0 # via # dask # lazy-loader @@ -122,9 +120,9 @@ partd==1.4.2 # via dask pexpect==4.9.0 # via ipython -pillow==11.1.0 +pillow==11.2.1 # via matplotlib -platformdirs==4.3.7 +platformdirs==4.3.8 # via pooch plopp @ git+https://github.com/scipp/plopp@main # via @@ -135,15 +133,15 @@ pluggy==1.5.0 # via pytest pooch==1.8.2 # via -r nightly.in -prompt-toolkit==3.0.50 +prompt-toolkit==3.0.51 # via ipython ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pydantic==2.11.3 +pydantic==2.11.4 # via scippneutron -pydantic-core==2.33.1 +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via ipython @@ -185,7 +183,7 @@ scippnexus @ git+https://github.com/scipp/scippnexus@main # -r nightly.in # essreduce # scippneutron -scipy==1.15.2 +scipy==1.15.3 # via # scippneutron # scippnexus @@ -212,8 +210,9 @@ traitlets==5.14.3 # traittypes traittypes==0.2.1 # via ipydatawidgets -typing-extensions==4.13.1 +typing-extensions==4.13.2 # via + # exceptiongroup # ipython # pydantic # pydantic-core @@ -223,11 +222,11 @@ typing-inspection==0.4.0 # via pydantic tzdata==2025.2 # via pandas -urllib3==2.3.0 +urllib3==2.4.0 # via requests wcwidth==0.2.13 # via prompt-toolkit -widgetsnbextension==4.0.13 +widgetsnbextension==4.0.14 # via ipywidgets zipp==3.21.0 # via importlib-metadata diff --git a/requirements/static.txt b/requirements/static.txt index c0e73660..118b39ca 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -11,15 +11,15 @@ distlib==0.3.9 # via virtualenv filelock==3.18.0 # via virtualenv -identify==2.6.9 +identify==2.6.10 # via pre-commit nodeenv==1.9.1 # via pre-commit -platformdirs==4.3.7 +platformdirs==4.3.8 # via virtualenv pre-commit==4.2.0 # via -r static.in pyyaml==6.0.2 # via pre-commit -virtualenv==20.30.0 +virtualenv==20.31.2 # via pre-commit diff --git a/requirements/wheels.txt b/requirements/wheels.txt index bfae20bf..21d3cb06 100644 --- a/requirements/wheels.txt +++ b/requirements/wheels.txt @@ -7,7 +7,7 @@ # build==1.2.2.post1 # via -r wheels.in -packaging==24.2 +packaging==25.0 # via build pyproject-hooks==1.2.0 # via build diff --git a/src/ess/dream/data.py b/src/ess/dream/data.py index 3a8697ac..82c73ce0 100644 --- a/src/ess/dream/data.py +++ b/src/ess/dream/data.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) """Data for tests and documentation with DREAM.""" -_version = "1" +_version = "2" __all__ = ["get_path"] @@ -21,16 +21,16 @@ def _make_pooch(): "DREAM_nexus_sorted-2023-12-07.nxs": "md5:22824e14f6eb950d24a720b2a0e2cb66", "DREAM_simple_pwd_workflow/data_dream_diamond_vana_container_sample_union.csv.zip": "md5:33302d0506b36aab74003b8aed4664cc", # noqa: E501 "DREAM_simple_pwd_workflow/data_dream_diamond_vana_container_sample_union_run2.csv.zip": 
"md5:c7758682f978d162dcb91e47c79abb83", # noqa: E501 - "DREAM_simple_pwd_workflow/data_dream_vana_container_sample_union.csv.zip": "md5:1e22917b2bb68b5cacfb506b72700a4d", # noqa: E501 + "DREAM_simple_pwd_workflow/data_dream_vana_container_sample_union.csv.zip": "md5:b8bb7c4bdf74ad5f19342bced8670915", # noqa: E501 "DREAM_simple_pwd_workflow/data_dream_vanadium.csv.zip": "md5:e5addfc06768140c76533946433fa2ec", # noqa: E501 "DREAM_simple_pwd_workflow/data_dream_vanadium_inc_coh.csv.zip": "md5:39d1a44e248b12966b26f7c2f6c602a2", # noqa: E501 "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_diam_in_can.dat": "md5:ef24f4a4186c628574046e6629e31611", # noqa: E501 - "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat": "md5:e63456c347fb36a362a0b5ae2556b3cf", # noqa: E501 + "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_van_can.dat": "md5:2cdef7ad9912652149b7e687381d2e99", # noqa: E501 "DREAM_simple_pwd_workflow/Cave_TOF_Monitor_vana_inc_coh.dat": "md5:701d66792f20eb283a4ce76bae0c8f8f", # noqa: E501 - "DREAM-high-flux-tof-lookup-table.h5": "md5:404145a970ed1188e524cba10194610e", # noqa: E501 + "DREAM-high-flux-tof-lookup-table.h5": "md5:1b95a359fa7b0d8b4277806ece9bf279", # noqa: E501 # Smaller files for unit tests "DREAM_simple_pwd_workflow/TEST_data_dream_diamond_vana_container_sample_union.csv.zip": "md5:018a87e0934c1dd0f07a708e9d497891", # noqa: E501 - "DREAM_simple_pwd_workflow/TEST_data_dream_vana_container_sample_union.csv.zip": "md5:6b4b6c3a7358cdb1dc5a36b56291ab1b", # noqa: E501 + "DREAM_simple_pwd_workflow/TEST_data_dream_vana_container_sample_union.csv.zip": "md5:d244126cd8012f9ed186f4e08a19f88d", # noqa: E501 "DREAM_simple_pwd_workflow/TEST_data_dream_vanadium.csv.zip": "md5:178f9bea9f35dbdef693e38ff893c258", # noqa: E501 "TEST_data_dream0_new_hkl_Si_pwd.csv.zip": "md5:df6c41f4b7b21e129915808f625828f6", # noqa: E501 "TEST_data_dream_with_sectors.csv.zip": "md5:2a6b5e40e6b67f6c71b25373bf4b11a1", # noqa: E501 diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py index ae969af6..ece8c317 100644 --- a/src/ess/dream/io/geant4.py +++ b/src/ess/dream/io/geant4.py @@ -4,12 +4,12 @@ import numpy as np import sciline import scipp as sc -import scippneutron as scn import scippnexus as snx from scippneutron.metadata import ESS_SOURCE from ess.powder.types import ( Beamline, + CalibratedBeamline, CalibratedDetector, CalibratedMonitor, CalibrationData, @@ -17,11 +17,9 @@ CaveMonitor, CaveMonitorPosition, DetectorData, - DetectorLtotal, Filename, MonitorData, MonitorFilename, - MonitorLtotal, MonitorType, NeXusComponent, NeXusDetectorName, @@ -31,8 +29,7 @@ Source, VanadiumRun, ) -from ess.reduce.nexus.types import CalibratedBeamline -from ess.reduce.nexus.workflow import GenericNeXusWorkflow +from ess.reduce.time_of_flight.workflow import GenericTofWorkflow MANTLE_DETECTOR_ID = sc.index(7) HIGH_RES_DETECTOR_ID = sc.index(8) @@ -276,30 +273,22 @@ def assemble_detector_data( out.bins.coords['event_time_offset'] = out.bins.coords['tof'] % period.to( unit=detector.bins.coords['tof'].bins.unit ) - graph = scn.conversion.graph.beamline.beamline(scatter=True) - return DetectorData[RunType]( - out.bins.drop_coords('tof').transform_coords( - "Ltotal", graph=graph, keep_intermediate=True - ) - ) + return DetectorData[RunType](out.bins.drop_coords('tof')) def assemble_monitor_data( monitor: CalibratedMonitor[RunType, MonitorType], ) -> MonitorData[RunType, MonitorType]: """ - Dummy assembly of monitor data, monitor already contains neutron data. 
-    We simply add a Ltotal coordinate necessary to calculate the time-of-flight.
+    Dummy assembly of monitor data; the monitor already contains neutron data with all
+    necessary coordinates.
 
     Parameters
     ----------
     monitor:
         The calibrated monitor data.
     """
-    graph = scn.conversion.graph.beamline.beamline(scatter=False)
-    return MonitorData[RunType, MonitorType](
-        monitor.transform_coords("Ltotal", graph=graph)
-    )
+    return MonitorData[RunType, MonitorType](monitor)
 
 
 def dummy_source_position() -> Position[snx.NXsource, RunType]:
@@ -314,28 +303,6 @@ def dummy_sample_position() -> Position[snx.NXsample, RunType]:
     )
 
 
-def extract_detector_ltotal(detector: DetectorData[RunType]) -> DetectorLtotal[RunType]:
-    """
-    Extract Ltotal from the detector data.
-    TODO: This is a temporary implementation. We should instead read the positions
-    separately from the event data, so we don't need to re-load the positions every time
-    new events come in while streaming live data.
-    """
-    return DetectorLtotal[RunType](detector.coords["Ltotal"])
-
-
-def extract_monitor_ltotal(
-    monitor: MonitorData[RunType, MonitorType],
-) -> MonitorLtotal[RunType, MonitorType]:
-    """
-    Extract Ltotal from the monitor data.
-    TODO: This is a temporary implementation. We should instead read the positions
-    separately from the event data, so we don't need to re-load the positions every time
-    new events come in while streaming live data.
-    """
-    return MonitorLtotal[RunType, MonitorType](monitor.coords["Ltotal"])
-
-
 def dream_beamline() -> Beamline:
     return Beamline(
         name="DREAM",
@@ -352,7 +319,7 @@ def LoadGeant4Workflow() -> sciline.Pipeline:
     """
     Workflow for loading NeXus data.
     """
-    wf = GenericNeXusWorkflow(
+    wf = GenericTofWorkflow(
         run_types=[SampleRun, VanadiumRun], monitor_types=[CaveMonitor]
     )
     wf.insert(extract_geant4_detector)
@@ -364,8 +331,6 @@
     wf.insert(assemble_monitor_data)
     wf.insert(dummy_source_position)
     wf.insert(dummy_sample_position)
-    wf.insert(extract_detector_ltotal)
-    wf.insert(extract_monitor_ltotal)
     wf.insert(dream_beamline)
     wf.insert(ess_source)
     return wf
diff --git a/src/ess/dream/workflow.py b/src/ess/dream/workflow.py
index d5ff3472..7525f1fb 100644
--- a/src/ess/dream/workflow.py
+++ b/src/ess/dream/workflow.py
@@ -12,6 +12,7 @@
 from ess.powder import providers as powder_providers
 from ess.powder import with_pixel_mask_filenames
+from ess.powder.conversion import convert_monitor_to_wavelength
 from ess.powder.correction import (
     RunNormalization,
     insert_run_normalization,
 )
@@ -19,18 +20,23 @@
 from ess.powder.types import (
     AccumulatedProtonCharge,
     CaveMonitorPosition,  # Should this be a DREAM-only parameter?
+    MonitorType,
     PixelMaskFilename,
     Position,
     ReducerSoftwares,
+    ResampledMonitorTofData,
+    RunType,
     SampleRun,
     TimeOfFlightLookupTableFilename,
     TofMask,
     TwoThetaMask,
     VanadiumRun,
     WavelengthMask,
+    WavelengthMonitor,
 )
 from ess.reduce import time_of_flight
 from ess.reduce.parameter import parameter_mappers
+from ess.reduce.time_of_flight import resample_monitor_time_of_flight_data
 from ess.reduce.workflow import register_workflow
 
 from .beamline import InstrumentConfiguration
@@ -92,6 +98,18 @@ def _collect_reducer_software() -> ReducerSoftwares:
     )
 
 
+def convert_dream_monitor_to_wavelength(
+    monitor: ResampledMonitorTofData[RunType, MonitorType],
+) -> WavelengthMonitor[RunType, MonitorType]:
+    """
+    DREAM monitors record in histogram mode, so we need to use the
+    resampled monitor data to avoid having NaNs in the time-of-flight coordinates.
+
+    This provider should be inserted in the Dream workflow below.
+    """
+    return convert_monitor_to_wavelength(monitor)
+
+
 def DreamGeant4Workflow(*, run_norm: RunNormalization) -> sciline.Pipeline:
     """
     Workflow with default parameters for the Dream Geant4 simulation.
@@ -99,6 +117,8 @@
     wf = LoadGeant4Workflow()
     for provider in itertools.chain(powder_providers, _dream_providers):
         wf.insert(provider)
+    wf.insert(convert_dream_monitor_to_wavelength)
+    wf.insert(resample_monitor_time_of_flight_data)
     insert_run_normalization(wf, run_norm)
     for key, value in itertools.chain(
         default_parameters().items(), time_of_flight.default_parameters().items()
diff --git a/src/ess/powder/conversion.py b/src/ess/powder/conversion.py
index 6f0da5a1..c930daaa 100644
--- a/src/ess/powder/conversion.py
+++ b/src/ess/powder/conversion.py
@@ -4,42 +4,24 @@
 Coordinate transformations for powder diffraction.
""" -import sciline as sl import scipp as sc import scippneutron as scn -from ess.reduce import time_of_flight - from .calibration import OutputCalibrationData from .correction import merge_calibration from .logging import get_logger from .types import ( CalibrationData, DataWithScatteringCoordinates, - DetectorData, - DetectorLtotal, - DistanceResolution, DspacingData, ElasticCoordTransformGraph, FilteredData, IofDspacing, IofTof, - LookupTableRelativeErrorThreshold, - LtotalRange, MaskedData, - MonitorData, - MonitorLtotal, + MonitorTofData, MonitorType, - PulsePeriod, - PulseStride, - PulseStrideOffset, RunType, - SimulationResults, - TimeOfFlightLookupTable, - TimeOfFlightLookupTableFilename, - TimeResolution, - TofData, - TofMonitorData, WavelengthMonitor, ) @@ -247,81 +229,8 @@ def convert_reduced_to_tof( ) -def build_tof_lookup_table( - simulation: SimulationResults, - ltotal_range: LtotalRange, - pulse_period: PulsePeriod, - pulse_stride: PulseStride, - pulse_stride_offset: PulseStrideOffset, - distance_resolution: DistanceResolution, - time_resolution: TimeResolution, - error_threshold: LookupTableRelativeErrorThreshold, -) -> TimeOfFlightLookupTable: - wf = sl.Pipeline( - time_of_flight.providers(), params=time_of_flight.default_parameters() - ) - wf[time_of_flight.SimulationResults] = simulation - wf[time_of_flight.LtotalRange] = ltotal_range - wf[time_of_flight.PulsePeriod] = pulse_period - wf[time_of_flight.PulseStride] = pulse_stride - wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset - wf[time_of_flight.DistanceResolution] = distance_resolution - wf[time_of_flight.TimeResolution] = time_resolution - wf[time_of_flight.LookupTableRelativeErrorThreshold] = error_threshold - return wf.compute(time_of_flight.TimeOfFlightLookupTable) - - -def load_tof_lookup_table( - filename: TimeOfFlightLookupTableFilename, -) -> TimeOfFlightLookupTable: - return TimeOfFlightLookupTable(sc.io.load_hdf5(filename)) - - -def compute_detector_time_of_flight( - detector_data: DetectorData[RunType], - lookup: TimeOfFlightLookupTable, - ltotal: DetectorLtotal[RunType], - pulse_period: PulsePeriod, - pulse_stride: PulseStride, - pulse_stride_offset: PulseStrideOffset, -) -> TofData[RunType]: - wf = sl.Pipeline( - time_of_flight.providers(), params=time_of_flight.default_parameters() - ) - wf[time_of_flight.RawData] = detector_data - wf[time_of_flight.TimeOfFlightLookupTable] = lookup - wf[time_of_flight.Ltotal] = ltotal - wf[time_of_flight.PulsePeriod] = pulse_period - wf[time_of_flight.PulseStride] = pulse_stride - wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset - return TofData[RunType](wf.compute(time_of_flight.TofData)) - - -def compute_monitor_time_of_flight( - monitor: MonitorData[RunType, MonitorType], - lookup: TimeOfFlightLookupTable, - ltotal: MonitorLtotal[RunType, MonitorType], - pulse_period: PulsePeriod, - pulse_stride: PulseStride, - pulse_stride_offset: PulseStrideOffset, -) -> TofMonitorData[RunType, MonitorType]: - wf = sl.Pipeline( - time_of_flight.providers(), params=time_of_flight.default_parameters() - ) - wf.insert(time_of_flight.resample_tof_data) - wf[time_of_flight.RawData] = monitor - wf[time_of_flight.TimeOfFlightLookupTable] = lookup - wf[time_of_flight.Ltotal] = ltotal - wf[time_of_flight.PulsePeriod] = pulse_period - wf[time_of_flight.PulseStride] = pulse_stride - wf[time_of_flight.PulseStrideOffset] = pulse_stride_offset - out = wf.compute(time_of_flight.ResampledTofData) - out.masks["zero_counts"] = out.data == sc.scalar(0.0, unit=out.data.unit) - 
-    return TofMonitorData[RunType, MonitorType](out)
-
-
 def convert_monitor_to_wavelength(
-    monitor: TofMonitorData[RunType, MonitorType],
+    monitor: MonitorTofData[RunType, MonitorType],
 ) -> WavelengthMonitor[RunType, MonitorType]:
     graph = {
         **scn.conversion.graph.beamline.beamline(scatter=False),
@@ -338,7 +247,4 @@ def convert_monitor_to_wavelength(
     convert_to_dspacing,
     convert_reduced_to_tof,
     convert_monitor_to_wavelength,
-    compute_detector_time_of_flight,
-    compute_monitor_time_of_flight,
-    load_tof_lookup_table,
 )
diff --git a/src/ess/powder/filtering.py b/src/ess/powder/filtering.py
index 7ab27e8a..39671185 100644
--- a/src/ess/powder/filtering.py
+++ b/src/ess/powder/filtering.py
@@ -12,7 +12,7 @@
 
 import scipp as sc
 
-from .types import FilteredData, RunType, TofData
+from .types import DetectorTofData, FilteredData, RunType
 
 
 def _equivalent_bin_indices(a, b) -> bool:
@@ -72,7 +72,7 @@ def remove_bad_pulses(
     return filtered
 
 
-def filter_events(data: TofData[RunType]) -> FilteredData[RunType]:
+def filter_events(data: DetectorTofData[RunType]) -> FilteredData[RunType]:
     """Remove bad events.
 
     Attention
diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py
index eeccd8f0..cf127809 100644
--- a/src/ess/powder/types.py
+++ b/src/ess/powder/types.py
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: BSD-3-Clause
-# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
+# Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
 
 """This module defines the domain types used in ess.powder.
 
@@ -26,6 +26,7 @@
 BunkerMonitor = reduce_t.Monitor2
 CalibratedBeamline = reduce_t.CalibratedBeamline
 CalibratedDetector = reduce_t.CalibratedDetector
+CalibratedBeamline = reduce_t.CalibratedBeamline
 CalibratedMonitor = reduce_t.CalibratedMonitor
 DetectorData = reduce_t.DetectorData
 DetectorPositionOffset = reduce_t.DetectorPositionOffset
@@ -43,6 +44,9 @@
 
 DetectorBankSizes = reduce_t.DetectorBankSizes
 
+DetectorTofData = tof_t.DetectorTofData
+MonitorTofData = tof_t.MonitorTofData
+ResampledMonitorTofData = tof_t.ResampledMonitorTofData
 PulsePeriod = tof_t.PulsePeriod
 PulseStride = tof_t.PulseStride
 PulseStrideOffset = tof_t.PulseStrideOffset
@@ -51,6 +55,7 @@
 LtotalRange = tof_t.LtotalRange
 LookupTableRelativeErrorThreshold = tof_t.LookupTableRelativeErrorThreshold
 TimeOfFlightLookupTable = tof_t.TimeOfFlightLookupTable
+TimeOfFlightLookupTableFilename = tof_t.TimeOfFlightLookupTableFilename
 SimulationResults = tof_t.SimulationResults
 
 RunType = TypeVar("RunType", SampleRun, VanadiumRun)
@@ -102,10 +107,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
     """Detector calibration data."""
 
 
-class TofData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
-    """Data with time-of-flight coordinate."""
-
-
 class DataWithScatteringCoordinates(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
     """Data with scattering coordinates computed for all events: wavelength,
     2theta, d-spacing."""
@@ -213,20 +214,4 @@ class RawDataAndMetadata(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
     """Reduced data in time-of-flight, ready to be saved to a CIF file."""
 
 
-class DetectorLtotal(sciline.Scope[RunType, sc.Variable], sc.Variable):
-    """Total path length of neutrons from source to detector (L1 + L2)."""
-
-
-class MonitorLtotal(sciline.Scope[RunType, MonitorType, sc.Variable], sc.Variable):
-    """Total path length of neutrons from source to monitor."""
-
-
-class TofMonitorData(sciline.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray):
-    """Monitor data with time-of-flight coordinate."""
-
-
-TimeOfFlightLookupTableFilename = NewType("TimeOfFlightLookupTableFilename", str)
-"""Filename of the time-of-flight lookup table."""
-
-
 del sc, sciline, NewType, TypeVar
diff --git a/src/ess/snspowder/powgen/data.py b/src/ess/snspowder/powgen/data.py
index 58568e2e..25cae8e6 100644
--- a/src/ess/snspowder/powgen/data.py
+++ b/src/ess/snspowder/powgen/data.py
@@ -10,11 +10,11 @@
     CalibrationData,
     CalibrationFilename,
     DetectorBankSizes,
+    DetectorTofData,
     Filename,
     ProtonCharge,
     RawDataAndMetadata,
     RunType,
-    TofData,
 )
 
 _version = "1"
@@ -218,14 +218,14 @@ def pooch_load_calibration(
 
 def extract_raw_data(
     dg: RawDataAndMetadata[RunType], sizes: DetectorBankSizes
-) -> TofData[RunType]:
+) -> DetectorTofData[RunType]:
     """Return the events from a loaded data group."""
     # Remove the tof binning and dimension, as it is not needed and it gets in the way
     # of masking.
     out = dg["data"].squeeze()
     out.coords.pop("tof", None)
     out = out.fold(dim="spectrum", sizes=sizes)
-    return TofData[RunType](out)
+    return DetectorTofData[RunType](out)
 
 
 def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunType]:
@@ -234,7 +234,7 @@
 
 
 def extract_accumulated_proton_charge(
-    data: TofData[RunType],
+    data: DetectorTofData[RunType],
 ) -> AccumulatedProtonCharge[RunType]:
     """Return the stored accumulated proton charge from a loaded data group."""
     return AccumulatedProtonCharge[RunType](data.coords["gd_prtn_chrg"])
diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py
index 5ab84e95..51603cd2 100644
--- a/tests/dream/geant4_reduction_test.py
+++ b/tests/dream/geant4_reduction_test.py
@@ -141,7 +141,9 @@ def simulation_dream_choppers():
 def test_pipeline_can_compute_dspacing_result_using_custom_built_tof_lookup(
     workflow, simulation_dream_choppers
 ):
-    workflow.insert(powder.conversion.build_tof_lookup_table)
+    from ess.reduce.time_of_flight.eto_to_tof import compute_tof_lookup_table
+
+    workflow.insert(compute_tof_lookup_table)
     workflow = powder.with_pixel_mask_filenames(workflow, [])
     workflow[SimulationResults] = simulation_dream_choppers
     workflow[LtotalRange] = sc.scalar(60.0, unit="m"), sc.scalar(80.0, unit="m")
diff --git a/tools/dream-make-tof-lookup-table.ipynb b/tools/dream-make-tof-lookup-table.ipynb
index 03f07082..2b07c15f 100644
--- a/tools/dream-make-tof-lookup-table.ipynb
+++ b/tools/dream-make-tof-lookup-table.ipynb
@@ -60,7 +60,7 @@
     "\n",
     "wf[time_of_flight.LtotalRange] = sc.scalar(60.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")\n",
     "wf[time_of_flight.SimulationResults] = time_of_flight.simulate_beamline(\n",
-    "    choppers=disk_choppers, neutrons=5_000_000\n",
+    "    choppers=disk_choppers, neutrons=5_000_000, source_position=sc.vector([0, 0, 0], unit='m'),\n",
     ")\n",
     "\n",
     "wf[time_of_flight.DistanceResolution] = sc.scalar(0.1, unit=\"m\")\n",
@@ -97,7 +97,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "table.squeeze().plot()"
+    "table.plot()"
    ]
   },
   {
@@ -138,8 +138,7 @@
    "mimetype": "text/x-python",
    "name": "python",
    "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.12.7"
+   "pygments_lexer": "ipython3"
   }
  },
 "nbformat": 4,