From 4833721b31a68ccf30be11fdeaa571f0b1cbaa92 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Mon, 3 Nov 2025 13:41:34 +0100 Subject: [PATCH 1/2] Require essreduce>=25.11.0 --- pyproject.toml | 2 +- requirements/base.in | 2 +- requirements/base.txt | 9 ++++----- requirements/nightly.in | 2 +- requirements/nightly.txt | 7 +++---- 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c3afc6b1..3bc429a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ dependencies = [ "scipp>=24.09.1", # Fixed new hist/bin API "scippneutron>=24.10.0", "scippnexus>=24.9.1", - "essreduce>=25.10.2", + "essreduce>=25.11.0", "pandas>=2.1.2", ] diff --git a/requirements/base.in b/requirements/base.in index b2fcd729..d83b9ef1 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -11,5 +11,5 @@ sciline>=24.6.0 scipp>=24.09.1 scippneutron>=24.10.0 scippnexus>=24.9.1 -essreduce>=25.10.2 +essreduce>=25.11.0 pandas>=2.1.2 diff --git a/requirements/base.txt b/requirements/base.txt index bf8543a6..6c2822f5 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:8d6f6741be43a1856d2a52aa2ce05d8e8110ee30 +# SHA1:ac5d5ed5d7ed7425eb3b92f0cfa128fe6db146b4 # # This file was generated by pip-compile-multi. # To update, run: @@ -9,7 +9,7 @@ annotated-types==0.7.0 # via pydantic click==8.3.0 # via dask -cloudpickle==3.1.1 +cloudpickle==3.1.2 # via dask contourpy==1.3.3 # via matplotlib @@ -23,7 +23,7 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.10.2 +essreduce==25.11.0 # via -r base.in fonttools==4.60.1 # via matplotlib @@ -100,7 +100,7 @@ pyyaml==6.0.3 # via # dask # orsopy -sciline==25.8.0 +sciline==25.11.1 # via # -r base.in # essreduce @@ -133,7 +133,6 @@ typing-extensions==4.15.0 # via # pydantic # pydantic-core - # sciline # typing-inspection typing-inspection==0.4.2 # via pydantic diff --git a/requirements/nightly.in b/requirements/nightly.in index 802ec85c..a598c800 100644 --- a/requirements/nightly.in +++ b/requirements/nightly.in @@ -5,7 +5,7 @@ dask>=2022.1.0 python-dateutil graphviz orsopy>=1.2 -essreduce>=25.10.2 +essreduce>=25.11.0 pandas>=2.1.2 pytest>=7.0 pooch>=1.5 diff --git a/requirements/nightly.txt b/requirements/nightly.txt index deb6dde0..b1a1dc31 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -1,4 +1,4 @@ -# SHA1:bc4cf516bc2a50b9d42c8baa03b95ea4d7e54a3d +# SHA1:1e183c3e9d303722f2579385b8d7861f9200b810 # # This file was generated by pip-compile-multi. 
# To update, run: @@ -16,7 +16,7 @@ charset-normalizer==3.4.4 # via requests click==8.3.0 # via dask -cloudpickle==3.1.1 +cloudpickle==3.1.2 # via dask contourpy==1.3.3 # via matplotlib @@ -30,7 +30,7 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.10.2 +essreduce==25.11.0 # via -r nightly.in fonttools==4.60.1 # via matplotlib @@ -157,7 +157,6 @@ typing-extensions==4.15.0 # via # pydantic # pydantic-core - # sciline # typing-inspection typing-inspection==0.4.2 # via pydantic From 3b088a1371ae4d9cce366ff2651d88db7b80649b Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Mon, 3 Nov 2025 14:41:08 +0100 Subject: [PATCH 2/2] Use new domain types from ESSreduce --- docs/user-guide/offspec/offspec_reduction.ipynb | 8 ++++---- src/ess/amor/__init__.py | 8 +++++++- src/ess/amor/load.py | 10 +++++----- src/ess/amor/workflow.py | 4 ++-- src/ess/estia/corrections.py | 4 ++-- src/ess/estia/load.py | 6 +++--- src/ess/estia/workflow.py | 6 +++++- src/ess/offspec/load.py | 4 ++-- src/ess/offspec/types.py | 3 ++- src/ess/offspec/workflow.py | 12 +++++++++--- src/ess/reflectometry/orso.py | 2 +- src/ess/reflectometry/types.py | 8 ++++---- tests/amor/pipeline_test.py | 2 +- tests/reflectometry/orso_test.py | 9 ++++++++- 14 files changed, 55 insertions(+), 31 deletions(-) diff --git a/docs/user-guide/offspec/offspec_reduction.ipynb b/docs/user-guide/offspec/offspec_reduction.ipynb index 90cbf727..3e876fad 100644 --- a/docs/user-guide/offspec/offspec_reduction.ipynb +++ b/docs/user-guide/offspec/offspec_reduction.ipynb @@ -113,8 +113,8 @@ "header.data_source.measurement = fileio.data_source.Measurement(\n", " instrument_settings=fileio.data_source.InstrumentSettings(\n", " incident_angle=fileio.base.Value(\n", - " wf.compute(DetectorData[SampleRun]).coords[\"theta\"].value,\n", - " wf.compute(DetectorData[SampleRun]).coords[\"theta\"].unit\n", + " wf.compute(RawDetector[SampleRun]).coords[\"theta\"].value,\n", + " wf.compute(RawDetector[SampleRun]).coords[\"theta\"].unit\n", " ),\n", " wavelength=None,\n", " polarization=\"unpolarized\",\n", @@ -183,8 +183,8 @@ "metadata": {}, "outputs": [], "source": [ - "wf.compute(DetectorData[SampleRun]).hist(tof=50).plot(norm='log') \\\n", - "+ wf.compute(DetectorData[ReferenceRun]).hist(tof=50).plot(norm='log')" + "wf.compute(RawDetector[SampleRun]).hist(tof=50).plot(norm='log') \\\n", + "+ wf.compute(RawDetector[ReferenceRun]).hist(tof=50).plot(norm='log')" ] }, { diff --git a/src/ess/amor/__init__.py b/src/ess/amor/__init__.py index 4148b94b..6270524c 100644 --- a/src/ess/amor/__init__.py +++ b/src/ess/amor/__init__.py @@ -14,8 +14,10 @@ DetectorSpatialResolution, NeXusDetectorName, Position, + ReferenceRun, RunType, SampleRotationOffset, + SampleRun, ) from . import ( conversions, @@ -81,7 +83,11 @@ def AmorWorkflow() -> sciline.Pipeline: """ Workflow with default parameters for the Amor PSI instrument. 
""" - return sciline.Pipeline(providers=providers, params=default_parameters()) + return sciline.Pipeline( + providers=providers, + params=default_parameters(), + constraints={RunType: [SampleRun, ReferenceRun]}, + ) __all__ = [ diff --git a/src/ess/amor/load.py b/src/ess/amor/load.py index 029f7908..5d6efe59 100644 --- a/src/ess/amor/load.py +++ b/src/ess/amor/load.py @@ -10,7 +10,6 @@ from ..reflectometry.types import ( Beamline, BeamSize, - DetectorData, DetectorRotation, Filename, Measurement, @@ -18,6 +17,7 @@ NeXusDetectorName, ProtonCurrent, RawChopper, + RawDetector, RawSampleRotation, RunType, SampleRotation, @@ -49,7 +49,7 @@ def load_events( chopper_separation: ChopperSeparation[RunType], sample_size: SampleSize[RunType], beam_size: BeamSize[RunType], -) -> DetectorData[RunType]: +) -> RawDetector[RunType]: event_data = detector["data"] if 'event_time_zero' in event_data.coords: event_data.bins.coords['event_time_zero'] = sc.bins_like( @@ -77,7 +77,7 @@ def load_events( data.coords["chopper_distance"] = chopper_distance data.coords["sample_size"] = sample_size data.coords["beam_size"] = beam_size - return DetectorData[RunType](data) + return RawDetector[RunType](data) def amor_chopper(f: Filename[RunType]) -> RawChopper[RunType]: @@ -133,13 +133,13 @@ def load_amor_proton_current( return pc -def load_beamline_metadata(filename: Filename[SampleRun]) -> Beamline: +def load_beamline_metadata(filename: Filename[RunType]) -> Beamline[RunType]: return nexus_workflow.load_beamline_metadata_from_nexus( NeXusFileSpec[SampleRun](filename) ) -def load_measurement_metadata(filename: Filename[SampleRun]) -> Measurement: +def load_measurement_metadata(filename: Filename[RunType]) -> Measurement[RunType]: return nexus_workflow.load_measurement_metadata_from_nexus( NeXusFileSpec[SampleRun](filename) ) diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py index 828abe4f..0c1dcdf3 100644 --- a/src/ess/amor/workflow.py +++ b/src/ess/amor/workflow.py @@ -8,8 +8,8 @@ from ..reflectometry.types import ( BeamDivergenceLimits, CoordTransformationGraph, - DetectorData, ProtonCurrent, + RawDetector, ReducibleData, RunType, WavelengthBins, @@ -20,7 +20,7 @@ def add_coords_masks_and_apply_corrections( - da: DetectorData[RunType], + da: RawDetector[RunType], ylim: YIndexLimits, zlims: ZIndexLimits, bdlim: BeamDivergenceLimits, diff --git a/src/ess/estia/corrections.py b/src/ess/estia/corrections.py index db5f60c5..2cdcefa2 100644 --- a/src/ess/estia/corrections.py +++ b/src/ess/estia/corrections.py @@ -11,8 +11,8 @@ from ..reflectometry.types import ( BeamDivergenceLimits, CoordTransformationGraph, - DetectorData, ProtonCurrent, + RawDetector, ReducibleData, RunType, WavelengthBins, @@ -23,7 +23,7 @@ def add_coords_masks_and_apply_corrections( - da: DetectorData[RunType], + da: RawDetector[RunType], ylim: YIndexLimits, zlims: ZIndexLimits, bdlim: BeamDivergenceLimits, diff --git a/src/ess/estia/load.py b/src/ess/estia/load.py index 663c9a63..fb1c2c8a 100644 --- a/src/ess/estia/load.py +++ b/src/ess/estia/load.py @@ -4,8 +4,8 @@ import scipp as sc from ..reflectometry.types import ( - DetectorData, Filename, + RawDetector, RunType, SampleRotationOffset, ) @@ -16,7 +16,7 @@ def load_mcstas_events( filename: Filename[RunType], sample_rotation_offset: SampleRotationOffset[RunType], -) -> DetectorData[RunType]: +) -> RawDetector[RunType]: """ Load event data from a McStas run and reshape it to look like what we would expect if @@ -96,7 +96,7 @@ def load_mcstas_events( ) da.bins.coords.pop('L') 
da.bins.coords.pop('t') - return DetectorData[RunType](da) + return RawDetector[RunType](da) providers = () diff --git a/src/ess/estia/workflow.py b/src/ess/estia/workflow.py index efe09daf..092dd1d2 100644 --- a/src/ess/estia/workflow.py +++ b/src/ess/estia/workflow.py @@ -10,8 +10,10 @@ BeamDivergenceLimits, DetectorSpatialResolution, NeXusDetectorName, + ReferenceRun, RunType, SampleRotationOffset, + SampleRun, ) from . import beamline, conversions, corrections, load, maskings, normalization, orso @@ -66,7 +68,9 @@ def default_parameters() -> dict: def EstiaMcStasWorkflow() -> sciline.Pipeline: """Workflow for reduction of McStas data for the Estia instrument.""" return sciline.Pipeline( - providers=mcstas_providers, params=mcstas_default_parameters() + providers=mcstas_providers, + params=mcstas_default_parameters(), + constraints={RunType: [SampleRun, ReferenceRun]}, ) diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py index 9c199f88..b98a0f75 100644 --- a/src/ess/offspec/load.py +++ b/src/ess/offspec/load.py @@ -2,13 +2,13 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc -from ..reflectometry.types import DetectorData, Filename, ReferenceRun, RunType +from ..reflectometry.types import Filename, RawDetector, ReferenceRun, RunType from .types import CoordTransformationGraph, MonitorData, NeXusMonitorName def load_offspec_events( filename: Filename[RunType], -) -> DetectorData[RunType]: +) -> RawDetector[RunType]: full = sc.io.load_hdf5(filename) da = full['data'] da.coords['theta'] = full.pop('Theta')[-1].data diff --git a/src/ess/offspec/types.py b/src/ess/offspec/types.py index 388de10f..4bb4cf89 100644 --- a/src/ess/offspec/types.py +++ b/src/ess/offspec/types.py @@ -4,6 +4,7 @@ import sciline import scipp as sc +import scippnexus as snx from ess.reduce.nexus import types as reduce_t @@ -21,4 +22,4 @@ class MonitorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """ "Monitor data from the run file, with background subtracted""" -NeXusMonitorName = reduce_t.NeXusName +NeXusMonitorName = reduce_t.NeXusName[snx.NXmonitor] diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py index 161d74c1..741ec6c9 100644 --- a/src/ess/offspec/workflow.py +++ b/src/ess/offspec/workflow.py @@ -4,9 +4,11 @@ from ..reflectometry import providers as reflectometry_providers from ..reflectometry.types import ( - DetectorData, + RawDetector, ReducibleData, + ReferenceRun, RunType, + SampleRun, WavelengthBins, ) from . 
import conversions, load, maskings, normalization @@ -33,11 +35,15 @@ def OffspecWorkflow() -> sciline.Pipeline: *maskings.providers, *normalization.providers, ) - return sciline.Pipeline(providers=ps, params={NeXusMonitorName: 'monitor2'}) + return sciline.Pipeline( + providers=ps, + params={NeXusMonitorName: 'monitor2'}, + constraints={RunType: [SampleRun, ReferenceRun]}, + ) def add_coords_masks_and_apply_corrections( - da: DetectorData[RunType], + da: RawDetector[RunType], spectrum_limits: SpectrumLimits, wlims: WavelengthBins, wbmin: BackgroundMinWavelength, diff --git a/src/ess/reflectometry/orso.py b/src/ess/reflectometry/orso.py index 436a33ae..472cebef 100644 --- a/src/ess/reflectometry/orso.py +++ b/src/ess/reflectometry/orso.py @@ -62,7 +62,7 @@ def parse_orso_experiment( - beamline: Beamline, measurement: Measurement + beamline: Beamline[SampleRun], measurement: Measurement[SampleRun] ) -> OrsoExperiment: """Parse ORSO experiment metadata from raw NeXus data.""" return OrsoExperiment( diff --git a/src/ess/reflectometry/types.py b/src/ess/reflectometry/types.py index f1eea80a..739c4ded 100644 --- a/src/ess/reflectometry/types.py +++ b/src/ess/reflectometry/types.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) -from typing import Any, NewType, TypeVar +from typing import Any, NewType import sciline import scipp as sc @@ -9,11 +9,11 @@ SampleRun = reduce_t.SampleRun ReferenceRun = NewType("ReferenceRun", int) -RunType = TypeVar("RunType", ReferenceRun, SampleRun) +RunType = reduce_t.RunType Beamline = reduce_t.Beamline -CalibratedDetector = reduce_t.CalibratedDetector -DetectorData = reduce_t.DetectorData +EmptyDetector = reduce_t.EmptyDetector +RawDetector = reduce_t.RawDetector DetectorPositionOffset = reduce_t.DetectorPositionOffset Filename = reduce_t.Filename Measurement = reduce_t.Measurement diff --git a/tests/amor/pipeline_test.py b/tests/amor/pipeline_test.py index b56427ce..4579741a 100644 --- a/tests/amor/pipeline_test.py +++ b/tests/amor/pipeline_test.py @@ -43,7 +43,7 @@ @pytest.fixture def amor_pipeline() -> sciline.Pipeline: - pl = sciline.Pipeline(providers=amor.providers, params=amor.default_parameters()) + pl = amor.AmorWorkflow() pl[SampleSize[SampleRun]] = sc.scalar(10.0, unit="mm") pl[SampleSize[ReferenceRun]] = sc.scalar(10.0, unit="mm") diff --git a/tests/reflectometry/orso_test.py b/tests/reflectometry/orso_test.py index 89feba08..93ee4543 100644 --- a/tests/reflectometry/orso_test.py +++ b/tests/reflectometry/orso_test.py @@ -10,7 +10,13 @@ from ess import amor, reflectometry from ess.amor import data # noqa: F401 from ess.reflectometry import orso -from ess.reflectometry.types import Filename, ReducibleData, ReferenceRun, SampleRun +from ess.reflectometry.types import ( + Filename, + ReducibleData, + ReferenceRun, + RunType, + SampleRun, +) def test_build_orso_data_source(): @@ -20,6 +26,7 @@ def test_build_orso_data_source(): Filename[SampleRun]: amor.data.amor_old_sample_run(), Filename[ReferenceRun]: amor.data.amor_old_reference_run(), }, + constraints={RunType: [SampleRun, ReferenceRun]}, ) pipeline[orso.OrsoInstrument] = None data_source = pipeline.compute(orso.OrsoDataSource)
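
Reviewer note (not part of the patch series): below is a minimal sketch, assuming ess.amor and its pooch-backed test data are installed, of how downstream code picks up the two changes in this series -- the domain types renamed in ESSreduce 25.11 (DetectorData -> RawDetector, CalibratedDetector -> EmptyDetector) and the new constraints argument passed to sciline.Pipeline. It mirrors the patterns in src/ess/amor/__init__.py, tests/amor/pipeline_test.py, and tests/reflectometry/orso_test.py; which additional parameters must be set before compute() succeeds depends on the data file, so treat it as illustrative rather than a definitive recipe.

    import sciline

    from ess import amor
    from ess.amor import data
    from ess.reflectometry.types import (
        Filename,
        RawDetector,   # formerly DetectorData (requires essreduce >= 25.11.0)
        ReferenceRun,
        RunType,
        SampleRun,
    )

    # The packaged factory now constrains the generic RunType to the two
    # concrete run types, as introduced in src/ess/amor/__init__.py.
    wf = amor.AmorWorkflow()

    # Equivalent manual construction, showing the constraints keyword that the
    # updated workflows pass to sciline.Pipeline.
    manual = sciline.Pipeline(
        providers=amor.providers,
        params=amor.default_parameters(),
        constraints={RunType: [SampleRun, ReferenceRun]},
    )

    wf[Filename[SampleRun]] = data.amor_old_sample_run()
    # Depending on the file, further parameters (e.g. SampleSize or
    # SampleRotationOffset) may need to be set first, as done in
    # tests/amor/pipeline_test.py.
    detector_events = wf.compute(RawDetector[SampleRun])  # was DetectorData[SampleRun]

Constraining RunType to [SampleRun, ReferenceRun] tells sciline which concrete run types the generic providers should be instantiated for; this presumably relies on the constraints support in recent sciline, which would explain the sciline==25.11.1 pin appearing in the regenerated requirements/base.txt.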