8 changes: 4 additions & 4 deletions docs/user-guide/offspec/offspec_reduction.ipynb
@@ -113,8 +113,8 @@
 "header.data_source.measurement = fileio.data_source.Measurement(\n",
 " instrument_settings=fileio.data_source.InstrumentSettings(\n",
 " incident_angle=fileio.base.Value(\n",
-" wf.compute(DetectorData[SampleRun]).coords[\"theta\"].value,\n",
-" wf.compute(DetectorData[SampleRun]).coords[\"theta\"].unit\n",
+" wf.compute(RawDetector[SampleRun]).coords[\"theta\"].value,\n",
+" wf.compute(RawDetector[SampleRun]).coords[\"theta\"].unit\n",
 " ),\n",
 " wavelength=None,\n",
 " polarization=\"unpolarized\",\n",
@@ -183,8 +183,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"wf.compute(DetectorData[SampleRun]).hist(tof=50).plot(norm='log') \\\n",
-"+ wf.compute(DetectorData[ReferenceRun]).hist(tof=50).plot(norm='log')"
+"wf.compute(RawDetector[SampleRun]).hist(tof=50).plot(norm='log') \\\n",
+"+ wf.compute(RawDetector[ReferenceRun]).hist(tof=50).plot(norm='log')"
 ]
 },
 {
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -39,7 +39,7 @@ dependencies = [
     "scipp>=24.09.1", # Fixed new hist/bin API
     "scippneutron>=24.10.0",
     "scippnexus>=24.9.1",
-    "essreduce>=25.10.2",
+    "essreduce>=25.11.0",
     "pandas>=2.1.2",
 ]

2 changes: 1 addition & 1 deletion requirements/base.in
@@ -11,5 +11,5 @@ sciline>=24.6.0
 scipp>=24.09.1
 scippneutron>=24.10.0
 scippnexus>=24.9.1
-essreduce>=25.10.2
+essreduce>=25.11.0
 pandas>=2.1.2
9 changes: 4 additions & 5 deletions requirements/base.txt
@@ -1,4 +1,4 @@
-# SHA1:8d6f6741be43a1856d2a52aa2ce05d8e8110ee30
+# SHA1:ac5d5ed5d7ed7425eb3b92f0cfa128fe6db146b4
 #
 # This file was generated by pip-compile-multi.
 # To update, run:
@@ -9,7 +9,7 @@ annotated-types==0.7.0
     # via pydantic
 click==8.3.0
     # via dask
-cloudpickle==3.1.1
+cloudpickle==3.1.2
     # via dask
 contourpy==1.3.3
     # via matplotlib
@@ -23,7 +23,7 @@ dnspython==2.8.0
     # via email-validator
 email-validator==2.3.0
     # via scippneutron
-essreduce==25.10.2
+essreduce==25.11.0
     # via -r base.in
 fonttools==4.60.1
     # via matplotlib
@@ -100,7 +100,7 @@ pyyaml==6.0.3
     # via
    #   dask
    #   orsopy
-sciline==25.8.0
+sciline==25.11.1
     # via
    #   -r base.in
    #   essreduce
@@ -133,7 +133,6 @@ typing-extensions==4.15.0
     # via
    #   pydantic
    #   pydantic-core
-    #   sciline
    #   typing-inspection
 typing-inspection==0.4.2
     # via pydantic
2 changes: 1 addition & 1 deletion requirements/nightly.in
@@ -5,7 +5,7 @@ dask>=2022.1.0
 python-dateutil
 graphviz
 orsopy>=1.2
-essreduce>=25.10.2
+essreduce>=25.11.0
 pandas>=2.1.2
 pytest>=7.0
 pooch>=1.5
7 changes: 3 additions & 4 deletions requirements/nightly.txt
@@ -1,4 +1,4 @@
-# SHA1:bc4cf516bc2a50b9d42c8baa03b95ea4d7e54a3d
+# SHA1:1e183c3e9d303722f2579385b8d7861f9200b810
 #
 # This file was generated by pip-compile-multi.
 # To update, run:
@@ -16,7 +16,7 @@ charset-normalizer==3.4.4
     # via requests
 click==8.3.0
     # via dask
-cloudpickle==3.1.1
+cloudpickle==3.1.2
     # via dask
 contourpy==1.3.3
     # via matplotlib
@@ -30,7 +30,7 @@ dnspython==2.8.0
     # via email-validator
 email-validator==2.3.0
     # via scippneutron
-essreduce==25.10.2
+essreduce==25.11.0
     # via -r nightly.in
 fonttools==4.60.1
     # via matplotlib
@@ -157,7 +157,6 @@ typing-extensions==4.15.0
     # via
    #   pydantic
    #   pydantic-core
-    #   sciline
    #   typing-inspection
 typing-inspection==0.4.2
     # via pydantic
8 changes: 7 additions & 1 deletion src/ess/amor/__init__.py
@@ -14,8 +14,10 @@
     DetectorSpatialResolution,
     NeXusDetectorName,
     Position,
+    ReferenceRun,
     RunType,
     SampleRotationOffset,
+    SampleRun,
 )
 from . import (
     conversions,
@@ -81,7 +83,11 @@ def AmorWorkflow() -> sciline.Pipeline:
     """
     Workflow with default parameters for the Amor PSI instrument.
     """
-    return sciline.Pipeline(providers=providers, params=default_parameters())
+    return sciline.Pipeline(
+        providers=providers,
+        params=default_parameters(),
+        constraints={RunType: [SampleRun, ReferenceRun]},
+    )


 __all__ = [
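For readers adapting to this change, a minimal usage sketch of the constrained workflow follows; it only combines calls visible elsewhere in this PR (AmorWorkflow, Filename, RawDetector), and the file name is a hypothetical placeholder, not part of the change itself.

from ess import amor
from ess.reflectometry.types import Filename, RawDetector, SampleRun

# RunType is now constrained to SampleRun and ReferenceRun at pipeline construction.
wf = amor.AmorWorkflow()
wf[Filename[SampleRun]] = "sample.hdf"  # hypothetical file name
raw = wf.compute(RawDetector[SampleRun])  # previously DetectorData[SampleRun]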
10 changes: 5 additions & 5 deletions src/ess/amor/load.py
@@ -10,14 +10,14 @@
 from ..reflectometry.types import (
     Beamline,
     BeamSize,
-    DetectorData,
     DetectorRotation,
     Filename,
     Measurement,
     NeXusComponent,
     NeXusDetectorName,
     ProtonCurrent,
     RawChopper,
+    RawDetector,
     RawSampleRotation,
     RunType,
     SampleRotation,
@@ -49,7 +49,7 @@ def load_events(
     chopper_separation: ChopperSeparation[RunType],
     sample_size: SampleSize[RunType],
     beam_size: BeamSize[RunType],
-) -> DetectorData[RunType]:
+) -> RawDetector[RunType]:
     event_data = detector["data"]
     if 'event_time_zero' in event_data.coords:
         event_data.bins.coords['event_time_zero'] = sc.bins_like(
@@ -77,7 +77,7 @@ def load_events(
     data.coords["chopper_distance"] = chopper_distance
     data.coords["sample_size"] = sample_size
     data.coords["beam_size"] = beam_size
-    return DetectorData[RunType](data)
+    return RawDetector[RunType](data)


 def amor_chopper(f: Filename[RunType]) -> RawChopper[RunType]:
@@ -133,13 +133,13 @@ def load_amor_proton_current(
     return pc


-def load_beamline_metadata(filename: Filename[SampleRun]) -> Beamline:
+def load_beamline_metadata(filename: Filename[RunType]) -> Beamline[RunType]:
     return nexus_workflow.load_beamline_metadata_from_nexus(
         NeXusFileSpec[SampleRun](filename)
     )


-def load_measurement_metadata(filename: Filename[SampleRun]) -> Measurement:
+def load_measurement_metadata(filename: Filename[RunType]) -> Measurement[RunType]:
     return nexus_workflow.load_measurement_metadata_from_nexus(
         NeXusFileSpec[SampleRun](filename)
     )
4 changes: 2 additions & 2 deletions src/ess/amor/workflow.py
@@ -8,8 +8,8 @@
 from ..reflectometry.types import (
     BeamDivergenceLimits,
     CoordTransformationGraph,
-    DetectorData,
     ProtonCurrent,
+    RawDetector,
     ReducibleData,
     RunType,
     WavelengthBins,
@@ -20,7 +20,7 @@


 def add_coords_masks_and_apply_corrections(
-    da: DetectorData[RunType],
+    da: RawDetector[RunType],
     ylim: YIndexLimits,
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
4 changes: 2 additions & 2 deletions src/ess/estia/corrections.py
@@ -11,8 +11,8 @@
 from ..reflectometry.types import (
     BeamDivergenceLimits,
     CoordTransformationGraph,
-    DetectorData,
     ProtonCurrent,
+    RawDetector,
     ReducibleData,
     RunType,
     WavelengthBins,
@@ -23,7 +23,7 @@


 def add_coords_masks_and_apply_corrections(
-    da: DetectorData[RunType],
+    da: RawDetector[RunType],
     ylim: YIndexLimits,
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
6 changes: 3 additions & 3 deletions src/ess/estia/load.py
@@ -4,8 +4,8 @@
 import scipp as sc

 from ..reflectometry.types import (
-    DetectorData,
     Filename,
+    RawDetector,
     RunType,
     SampleRotationOffset,
 )
@@ -16,7 +16,7 @@
 def load_mcstas_events(
     filename: Filename[RunType],
     sample_rotation_offset: SampleRotationOffset[RunType],
-) -> DetectorData[RunType]:
+) -> RawDetector[RunType]:
     """
     Load event data from a McStas run and reshape it
     to look like what we would expect if
@@ -96,7 +96,7 @@ def load_mcstas_events(
     )
     da.bins.coords.pop('L')
     da.bins.coords.pop('t')
-    return DetectorData[RunType](da)
+    return RawDetector[RunType](da)


 providers = ()
6 changes: 5 additions & 1 deletion src/ess/estia/workflow.py
@@ -10,8 +10,10 @@
     BeamDivergenceLimits,
     DetectorSpatialResolution,
     NeXusDetectorName,
+    ReferenceRun,
     RunType,
     SampleRotationOffset,
+    SampleRun,
 )
 from . import beamline, conversions, corrections, load, maskings, normalization, orso

@@ -66,7 +68,9 @@ def default_parameters() -> dict:
 def EstiaMcStasWorkflow() -> sciline.Pipeline:
     """Workflow for reduction of McStas data for the Estia instrument."""
     return sciline.Pipeline(
-        providers=mcstas_providers, params=mcstas_default_parameters()
+        providers=mcstas_providers,
+        params=mcstas_default_parameters(),
+        constraints={RunType: [SampleRun, ReferenceRun]},
     )


4 changes: 2 additions & 2 deletions src/ess/offspec/load.py
@@ -2,13 +2,13 @@
 # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
 import scipp as sc

-from ..reflectometry.types import DetectorData, Filename, ReferenceRun, RunType
+from ..reflectometry.types import Filename, RawDetector, ReferenceRun, RunType
 from .types import CoordTransformationGraph, MonitorData, NeXusMonitorName


 def load_offspec_events(
     filename: Filename[RunType],
-) -> DetectorData[RunType]:
+) -> RawDetector[RunType]:
     full = sc.io.load_hdf5(filename)
     da = full['data']
     da.coords['theta'] = full.pop('Theta')[-1].data
3 changes: 2 additions & 1 deletion src/ess/offspec/types.py
@@ -4,6 +4,7 @@

 import sciline
 import scipp as sc
+import scippnexus as snx

 from ess.reduce.nexus import types as reduce_t

@@ -21,4 +22,4 @@ class MonitorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
     """ "Monitor data from the run file, with background subtracted"""


-NeXusMonitorName = reduce_t.NeXusName
+NeXusMonitorName = reduce_t.NeXusName[snx.NXmonitor]
12 changes: 9 additions & 3 deletions src/ess/offspec/workflow.py
@@ -4,9 +4,11 @@

 from ..reflectometry import providers as reflectometry_providers
 from ..reflectometry.types import (
-    DetectorData,
+    RawDetector,
     ReducibleData,
+    ReferenceRun,
     RunType,
+    SampleRun,
     WavelengthBins,
 )
 from . import conversions, load, maskings, normalization
@@ -33,11 +35,15 @@ def OffspecWorkflow() -> sciline.Pipeline:
         *maskings.providers,
         *normalization.providers,
     )
-    return sciline.Pipeline(providers=ps, params={NeXusMonitorName: 'monitor2'})
+    return sciline.Pipeline(
+        providers=ps,
+        params={NeXusMonitorName: 'monitor2'},
+        constraints={RunType: [SampleRun, ReferenceRun]},
+    )


 def add_coords_masks_and_apply_corrections(
-    da: DetectorData[RunType],
+    da: RawDetector[RunType],
     spectrum_limits: SpectrumLimits,
     wlims: WavelengthBins,
     wbmin: BackgroundMinWavelength,
2 changes: 1 addition & 1 deletion src/ess/reflectometry/orso.py
@@ -62,7 +62,7 @@


 def parse_orso_experiment(
-    beamline: Beamline, measurement: Measurement
+    beamline: Beamline[SampleRun], measurement: Measurement[SampleRun]
 ) -> OrsoExperiment:
     """Parse ORSO experiment metadata from raw NeXus data."""
     return OrsoExperiment(
8 changes: 4 additions & 4 deletions src/ess/reflectometry/types.py
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 # Copyright (c) 2025 Scipp contributors (https://github.com/scipp)
-from typing import Any, NewType, TypeVar
+from typing import Any, NewType

 import sciline
 import scipp as sc
@@ -9,11 +9,11 @@

 SampleRun = reduce_t.SampleRun
 ReferenceRun = NewType("ReferenceRun", int)
-RunType = TypeVar("RunType", ReferenceRun, SampleRun)
+RunType = reduce_t.RunType

 Beamline = reduce_t.Beamline
 CalibratedDetector = reduce_t.CalibratedDetector
-DetectorData = reduce_t.DetectorData
+EmptyDetector = reduce_t.EmptyDetector
+RawDetector = reduce_t.RawDetector
 DetectorPositionOffset = reduce_t.DetectorPositionOffset
 Filename = reduce_t.Filename
 Measurement = reduce_t.Measurement
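The substantive change here is that RunType stops being a package-local TypeVar and is re-exported from essreduce, so the allowed run types are no longer fixed at the type definition but stated when each pipeline is built. A brief illustrative sketch, mirroring the Pipeline calls added elsewhere in this PR (the empty provider tuple is a placeholder):

import sciline
from ess.reflectometry.types import ReferenceRun, RunType, SampleRun

# Providers stay generic over RunType; each workflow names the concrete run
# types it supports via the constraints argument, as AmorWorkflow,
# EstiaMcStasWorkflow and OffspecWorkflow now do.
pipeline = sciline.Pipeline(
    providers=(),  # placeholder provider list
    constraints={RunType: [SampleRun, ReferenceRun]},
)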
2 changes: 1 addition & 1 deletion tests/amor/pipeline_test.py
@@ -43,7 +43,7 @@

 @pytest.fixture
 def amor_pipeline() -> sciline.Pipeline:
-    pl = sciline.Pipeline(providers=amor.providers, params=amor.default_parameters())
+    pl = amor.AmorWorkflow()
     pl[SampleSize[SampleRun]] = sc.scalar(10.0, unit="mm")
     pl[SampleSize[ReferenceRun]] = sc.scalar(10.0, unit="mm")
