diff --git a/docs/user-guide/common/beam-center-finder.ipynb b/docs/user-guide/common/beam-center-finder.ipynb
index bc98dff5..70d12ed4 100644
--- a/docs/user-guide/common/beam-center-finder.ipynb
+++ b/docs/user-guide/common/beam-center-finder.ipynb
@@ -77,7 +77,8 @@
"metadata": {},
"outputs": [],
"source": [
- "raw = workflow.compute(RawDetector[SampleRun])['spectrum', :61440]\n",
+ "workflow[BeamCenter] = sc.vector([0, 0, 0], unit='m')\n",
+ "raw = workflow.compute(DetectorData[SampleRun])['spectrum', :61440]\n",
"\n",
"p = isis.plot_flat_detector_xy(raw.hist(), norm='log')\n",
"p.ax.plot(0, 0, '+', color='k', ms=10)\n",
@@ -305,10 +306,10 @@
"outputs": [],
"source": [
"workflow[Filename[SampleRun]] = isis.data.sans2d_tutorial_sample_run()\n",
- "detector = workflow.compute(RawDetector[SampleRun])['spectrum', :61440].assign_masks(\n",
- " masked.masks\n",
- ")\n",
- "workflow[RawDetector[SampleRun]] = detector"
+ "workflow[BeamCenter] = sc.vector([0, 0, 0], unit='m')\n",
+ "detector = workflow.compute(NeXusDetector[SampleRun]).copy()\n",
+ "detector['data'] = detector['data']['spectrum', :61440].assign_masks(masked.masks)\n",
+ "workflow[NeXusDetector[SampleRun]] = detector"
]
},
{
@@ -449,7 +450,7 @@
"workflow[WavelengthBands] = None\n",
"kwargs = dict( # noqa: C408\n",
" workflow=workflow,\n",
- " detector=detector,\n",
+ " detector=detector['data'],\n",
" norm=workflow.compute(NormWavelengthTerm[SampleRun]),\n",
")"
]
diff --git a/pyproject.toml b/pyproject.toml
index b224323e..2b761d42 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ requires-python = ">=3.10"
dependencies = [
"dask",
"graphviz",
- "essreduce>=24.08.0",
+ "essreduce>=24.08.1",
"numpy",
"pandas",
"plopp",
diff --git a/requirements/base.in b/requirements/base.in
index 36208e14..32673b6b 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -4,7 +4,7 @@
# The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY!
dask
graphviz
-essreduce>=24.08.0
+essreduce>=24.08.1
numpy
pandas
plopp
diff --git a/requirements/base.txt b/requirements/base.txt
index bda6cd0a..29bc16e2 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,4 +1,4 @@
-# SHA1:df4cf8a4a39fd2d213b55c8c11559ef6e4fe6fe1
+# SHA1:4cb11db62c9d3136130eb212e7bcf639441c13e6
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -23,7 +23,7 @@ dask==2024.8.1
# via -r base.in
decorator==5.1.1
# via ipython
-essreduce==24.8.0
+essreduce==24.8.1
# via -r base.in
exceptiongroup==1.2.2
# via ipython
@@ -120,7 +120,9 @@ pytz==2024.1
pyyaml==6.0.2
# via dask
sciline==24.6.2
- # via -r base.in
+ # via
+ # -r base.in
+ # essreduce
scipp==24.8.0
# via
# -r base.in
diff --git a/src/ess/isissans/__init__.py b/src/ess/isissans/__init__.py
index ce61a4e8..440d8732 100644
--- a/src/ess/isissans/__init__.py
+++ b/src/ess/isissans/__init__.py
@@ -3,9 +3,8 @@
import importlib.metadata
-from . import components, general, io, sans2d, zoom
-from .components import DetectorBankOffset, MonitorOffset, SampleOffset
-from .general import default_parameters
+from . import general, io, sans2d, zoom
+from .general import DetectorBankOffset, MonitorOffset, SampleOffset, default_parameters
from .io import CalibrationFilename
from .visualization import plot_flat_detector_xy
@@ -14,17 +13,17 @@
except importlib.metadata.PackageNotFoundError:
__version__ = "0.0.0"
-providers = components.providers + general.providers + io.providers
+providers = general.providers
del importlib
__all__ = [
'CalibrationFilename',
'DetectorBankOffset',
- 'io',
'MonitorOffset',
- 'providers',
'SampleOffset',
+ 'io',
+ 'providers',
'plot_flat_detector_xy',
'sans2d',
'default_parameters',
diff --git a/src/ess/isissans/components.py b/src/ess/isissans/components.py
deleted file mode 100644
index 8141aac5..00000000
--- a/src/ess/isissans/components.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
-from typing import NewType
-
-import sciline
-import scipp as sc
-
-from ..sans.types import (
- BeamCenter,
- CalibratedDetector,
- MonitorType,
- RawDetector,
- RawMonitor,
- RawMonitorData,
- RunType,
- ScatteringRunType,
-)
-
-
-class MonitorOffset(sciline.Scope[MonitorType, sc.Variable], sc.Variable):
- """Offset for monitor position"""
-
-
-SampleOffset = NewType('SampleOffset', sc.Variable)
-DetectorBankOffset = NewType('DetectorBankOffset', sc.Variable)
-
-
-def apply_beam_center(
- data: RawDetector[ScatteringRunType], beam_center: BeamCenter
-) -> CalibratedDetector[ScatteringRunType]:
- """Apply user offsets to raw data.
-
- Parameters
- ----------
- data:
- Raw data.
- sample_offset:
- Sample offset.
- detector_bank_offset:
- Detector bank offset.
- """
- return CalibratedDetector[ScatteringRunType](
- data.assign_coords(position=data.coords['position'] - beam_center)
- )
-
-
-def apply_component_user_offsets_to_raw_monitor(
- monitor_data: RawMonitor[RunType, MonitorType],
- monitor_offset: MonitorOffset[MonitorType],
-) -> RawMonitorData[RunType, MonitorType]:
- """Apply user offsets to raw monitor.
- Parameters
- ----------
- monitor_data:
- Raw monitor data.
- monitor_offset:
- Offset to apply to monitor position.
- """
- monitor_data = monitor_data.copy(deep=False)
- pos = monitor_data.coords['position']
- monitor_data.coords['position'] = pos + monitor_offset.to(unit=pos.unit, copy=False)
- return RawMonitorData[RunType, MonitorType](monitor_data)
-
-
-providers = (
- apply_beam_center,
- apply_component_user_offsets_to_raw_monitor,
-)
diff --git a/src/ess/isissans/data.py b/src/ess/isissans/data.py
index 6d752927..589552ea 100644
--- a/src/ess/isissans/data.py
+++ b/src/ess/isissans/data.py
@@ -94,7 +94,9 @@ def zoom_tutorial_calibration() -> Filename[CalibrationFilename]:
def zoom_tutorial_sample_run() -> Filename[SampleRun]:
- return _zoom_registry.get_path('ZOOM00034786.nxs.h5.zip', unzip=True)[0]
+ return Filename[SampleRun](
+ _zoom_registry.get_path('ZOOM00034786.nxs.h5.zip', unzip=True)[0]
+ )
def zoom_tutorial_empty_beam_run() -> Filename[EmptyBeamRun]:
diff --git a/src/ess/isissans/general.py b/src/ess/isissans/general.py
index 870061c5..ecf1f5b5 100644
--- a/src/ess/isissans/general.py
+++ b/src/ess/isissans/general.py
@@ -4,45 +4,68 @@
Providers for the ISIS instruments.
"""
+from typing import NewType
+
+import sciline
import scipp as sc
-from ..sans.types import (
+from ess.sans.types import (
+ BeamCenter,
CalibratedDetector,
+ CalibratedMonitor,
CorrectForGravity,
DetectorData,
DetectorIDs,
DetectorPixelShape,
+ DetectorPositionOffset,
DimsToKeep,
Incident,
LabFrameTransform,
+ MonitorData,
+ MonitorPositionOffset,
MonitorType,
+ NeXusDetector,
+ NeXusMonitor,
NeXusMonitorName,
NonBackgroundWavelengthRange,
- RawDetector,
- RawMonitor,
- RawMonitorData,
RunNumber,
RunTitle,
RunType,
+ SamplePosition,
SampleRun,
ScatteringRunType,
+ SourcePosition,
TofData,
TofMonitor,
Transmission,
WavelengthBands,
WavelengthMask,
)
-from .components import DetectorBankOffset, MonitorOffset, SampleOffset
+
from .io import LoadedFileContents
from .mantidio import Period
+class MonitorOffset(sciline.Scope[MonitorType, sc.Variable], sc.Variable):
+ """
+ Offset for monitor position for all runs.
+ """
+
+
+DetectorBankOffset = NewType('DetectorBankOffset', sc.Variable)
+SampleOffset = NewType('SampleOffset', sc.Variable)
+
+
def default_parameters() -> dict:
return {
CorrectForGravity: False,
DimsToKeep: (),
- MonitorOffset[Incident]: MonitorOffset(sc.vector([0, 0, 0], unit='m')),
- MonitorOffset[Transmission]: MonitorOffset(sc.vector([0, 0, 0], unit='m')),
+ MonitorOffset[Incident]: MonitorOffset[Incident](
+ sc.vector([0, 0, 0], unit='m')
+ ),
+ MonitorOffset[Transmission]: MonitorOffset[Transmission](
+ sc.vector([0, 0, 0], unit='m')
+ ),
DetectorBankOffset: DetectorBankOffset(sc.vector([0, 0, 0], unit='m')),
SampleOffset: SampleOffset(sc.vector([0, 0, 0], unit='m')),
NonBackgroundWavelengthRange: None,
@@ -52,45 +75,70 @@ def default_parameters() -> dict:
}
-def get_detector_data(
- dg: LoadedFileContents[RunType],
- sample_offset: SampleOffset,
- detector_bank_offset: DetectorBankOffset,
-) -> RawDetector[RunType]:
+def to_detector_position_offset(
+ global_offset: DetectorBankOffset, beam_center: BeamCenter
+) -> DetectorPositionOffset[RunType]:
+ return DetectorPositionOffset[RunType](global_offset - beam_center)
+
+
+def to_monitor_position_offset(
+ global_offset: MonitorOffset[MonitorType],
+) -> MonitorPositionOffset[RunType, MonitorType]:
+ return MonitorPositionOffset[RunType, MonitorType](global_offset)
+
+
+def get_source_position(dg: LoadedFileContents[RunType]) -> SourcePosition[RunType]:
+ """Get source position from raw data."""
+ return SourcePosition[RunType](dg['data'].coords['source_position'])
+
+
+def get_sample_position(
+ dg: LoadedFileContents[RunType], offset: SampleOffset
+) -> SamplePosition[RunType]:
+ """Get sample position from raw data and apply user offset."""
+ return SamplePosition[RunType](
+ dg['data'].coords['sample_position'] + offset.to(unit='m')
+ )
+
+
+def get_detector_data(dg: LoadedFileContents[RunType]) -> NeXusDetector[RunType]:
"""Get detector data and apply user offsets to raw data.
Parameters
----------
dg:
Data loaded with Mantid and converted to Scipp.
- sample_offset:
- Sample offset.
- detector_bank_offset:
- Detector bank offset.
"""
- data = dg['data']
- sample_pos = data.coords['sample_position']
- sample_pos = sample_pos + sample_offset.to(unit=sample_pos.unit, copy=False)
- pos = data.coords['position']
- pos = pos + detector_bank_offset.to(unit=pos.unit, copy=False)
- return RawDetector[RunType](
- dg['data'].assign_coords(position=pos, sample_position=sample_pos)
+ # The generic NeXus workflow extracts 'data' from this group, which the Mantid
+ # contents provide as well. We reuse the generic workflow since it also applies
+ # position offsets, etc.
+ return NeXusDetector[RunType](dg)
+
+
+def get_monitor_data(
+ dg: LoadedFileContents[RunType], nexus_name: NeXusMonitorName[MonitorType]
+) -> NeXusMonitor[RunType, MonitorType]:
+ # The generic NeXus workflow extracts 'data' from this group, which the Mantid
+ # contents provide as well. We reuse the generic workflow since it also applies
+ # position offsets, etc.
+ monitor = dg['monitors'][nexus_name]['data']
+ return NeXusMonitor[RunType, MonitorType](
+ sc.DataGroup(data=monitor, position=monitor.coords['position'])
)
-def assemble_detector_data(
+def dummy_assemble_detector_data(
detector: CalibratedDetector[RunType],
) -> DetectorData[RunType]:
"""Dummy assembly of detector data, detector already contains neutron data."""
return DetectorData[RunType](detector)
-def get_monitor_data(
- dg: LoadedFileContents[RunType], nexus_name: NeXusMonitorName[MonitorType]
-) -> RawMonitor[RunType, MonitorType]:
- # See https://github.com/scipp/sciline/issues/52 why copy needed
- mon = dg['monitors'][nexus_name]['data'].copy()
- return RawMonitor[RunType, MonitorType](mon)
+def dummy_assemble_monitor_data(
+ monitor: CalibratedMonitor[RunType, MonitorType],
+) -> MonitorData[RunType, MonitorType]:
+ """Dummy assembly of monitor data, monitor already contains neutron data."""
+ return MonitorData[RunType, MonitorType](monitor)
def data_to_tof(
@@ -102,7 +150,7 @@ def data_to_tof(
def monitor_to_tof(
- da: RawMonitorData[RunType, MonitorType],
+ da: MonitorData[RunType, MonitorType],
) -> TofMonitor[RunType, MonitorType]:
"""Dummy conversion of monitor data to time-of-flight data.
The monitor data already has a time-of-flight coordinate."""
@@ -166,7 +214,12 @@ def get_detector_ids_from_sample_run(data: TofData[SampleRun]) -> DetectorIDs:
providers = (
- assemble_detector_data,
+ dummy_assemble_detector_data,
+ dummy_assemble_monitor_data,
+ to_detector_position_offset,
+ to_monitor_position_offset,
+ get_source_position,
+ get_sample_position,
get_detector_data,
get_detector_ids_from_sample_run,
get_monitor_data,
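
The comments in `get_detector_data` and `get_monitor_data` above hinge on the generic NeXus workflow pulling the `'data'` item out of the group it receives. A minimal sketch of the group shape these providers hand over; the event values and positions below are made up for illustration:

```python
import scipp as sc

# Toy stand-in for dg['monitors'][nexus_name]['data'] from the Mantid loader.
monitor_events = sc.DataArray(
    sc.ones(dims=['tof'], shape=[3], unit='counts'),
    coords={
        'tof': sc.linspace('tof', 0.0, 1.0, 3, unit='ms'),
        'position': sc.vector([0.0, 0.0, -10.0], unit='m'),
    },
)
# Group handed to the generic workflow as NeXusMonitor: a 'data' item plus the
# 'position', mirroring what get_monitor_data builds above.
monitor_group = sc.DataGroup(data=monitor_events,
                             position=monitor_events.coords['position'])
print(monitor_group)
```
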
diff --git a/src/ess/isissans/io.py b/src/ess/isissans/io.py
index c56f1493..0b6d4cc0 100644
--- a/src/ess/isissans/io.py
+++ b/src/ess/isissans/io.py
@@ -14,8 +14,6 @@
DirectBeam,
DirectBeamFilename,
Filename,
- MaskedDetectorIDs,
- PixelMaskFilename,
RunType,
SampleRun,
TransmissionRun,
@@ -24,46 +22,6 @@
CalibrationFilename = NewType('CalibrationFilename', str)
-def read_xml_detector_masking(filename: PixelMaskFilename) -> MaskedDetectorIDs:
- """Read a pixel mask from an XML file.
-
- The format is as follows, where the detids are inclusive ranges of detector IDs:
-
- .. code-block:: xml
-
-        <?xml version="1.0"?>
-        <detector-masking>
-            <group>
-                <detids>1400203-1400218,1401199,1402190-1402223</detids>
-            </group>
-        </detector-masking>
-
- Parameters
- ----------
- filename:
- Path to the XML file.
- """
- import xml.etree.ElementTree as ET # nosec
-
- tree = ET.parse(filename) # noqa: S314
- root = tree.getroot()
-
- masked_detids = []
- for group in root.findall('group'):
- for detids in group.findall('detids'):
- for detid in detids.text.split(','):
- detid = detid.strip()
- if '-' in detid:
- start, stop = detid.split('-')
- masked_detids += list(range(int(start), int(stop) + 1))
- else:
- masked_detids.append(int(detid))
-
- return MaskedDetectorIDs(
- sc.array(dims=['detector_id'], values=masked_detids, unit=None, dtype='int32')
- )
-
-
class LoadedFileContents(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
"""Contents of a loaded file."""
@@ -92,6 +50,3 @@ def transmission_from_background_run(
Use transmission from a background run, instead of dedicated run.
"""
return LoadedFileContents[TransmissionRun[BackgroundRun]](data)
-
-
-providers = (read_xml_detector_masking,)
diff --git a/src/ess/isissans/sans2d.py b/src/ess/isissans/sans2d.py
index 05ad93a8..b8c377c5 100644
--- a/src/ess/isissans/sans2d.py
+++ b/src/ess/isissans/sans2d.py
@@ -4,9 +4,10 @@
import sciline
import scipp as sc
+from ess.reduce.nexus.generic_workflow import GenericNeXusWorkflow
from ess.sans import providers as sans_providers
-from ess.sans.types import DetectorMasks, RawDetector, SampleRun
+from ess.sans.types import BeamCenter, CalibratedDetector, DetectorMasks, SampleRun
from .general import default_parameters
from .io import load_tutorial_direct_beam, load_tutorial_run
@@ -23,27 +24,33 @@
"""Sample holder mask"""
-# It may make more sense to depend on CalibratedDetector here, but the current
-# x and y limits are before setting the beam center, so we use RawDetector
-def detector_edge_mask(sample: RawDetector[SampleRun]) -> DetectorEdgeMask:
- mask_edges = (
- sc.abs(sample.coords['position'].fields.x) > sc.scalar(0.48, unit='m')
- ) | (sc.abs(sample.coords['position'].fields.y) > sc.scalar(0.45, unit='m'))
+def detector_edge_mask(
+ beam_center: BeamCenter, sample: CalibratedDetector[SampleRun]
+) -> DetectorEdgeMask:
+ # These values were determined by hand before the beam center was available.
+ # We therefore undo the shift introduced by the beam center.
+ raw_pos = sample.coords['position'] + beam_center
+ mask_edges = (sc.abs(raw_pos.fields.x) > sc.scalar(0.48, unit='m')) | (
+ sc.abs(raw_pos.fields.y) > sc.scalar(0.45, unit='m')
+ )
return DetectorEdgeMask(mask_edges)
-# It may make more sense to depend on CalibratedDetector here, but the current
-# x and y limits are before setting the beam center, so we use RawDetector
def sample_holder_mask(
- sample: RawDetector[SampleRun], low_counts_threshold: LowCountThreshold
+ beam_center: BeamCenter,
+ sample: CalibratedDetector[SampleRun],
+ low_counts_threshold: LowCountThreshold,
) -> SampleHolderMask:
+ # These values were determined by hand before the beam center was available.
+ # We therefore undo the shift introduced by the beam center.
+ raw_pos = sample.coords['position'] + beam_center
summed = sample.hist()
holder_mask = (
(summed.data < low_counts_threshold)
- & (sample.coords['position'].fields.x > sc.scalar(0, unit='m'))
- & (sample.coords['position'].fields.x < sc.scalar(0.42, unit='m'))
- & (sample.coords['position'].fields.y < sc.scalar(0.05, unit='m'))
- & (sample.coords['position'].fields.y > sc.scalar(-0.15, unit='m'))
+ & (raw_pos.fields.x > sc.scalar(0, unit='m'))
+ & (raw_pos.fields.x < sc.scalar(0.42, unit='m'))
+ & (raw_pos.fields.y < sc.scalar(0.05, unit='m'))
+ & (raw_pos.fields.y > sc.scalar(-0.15, unit='m'))
)
return SampleHolderMask(holder_mask)
@@ -79,9 +86,14 @@ def Sans2dWorkflow() -> sciline.Pipeline:
"""Create Sans2d workflow with default parameters."""
from . import providers as isis_providers
- params = default_parameters()
- sans2d_providers = sans_providers + isis_providers + mantid_providers + providers
- return sciline.Pipeline(providers=sans2d_providers, params=params)
+ # Note that the actual NeXus loading in this workflow will not be used for the
+ # ISIS files; the providers inserted below replace those steps.
+ workflow = GenericNeXusWorkflow()
+ for provider in sans_providers + isis_providers + mantid_providers + providers:
+ workflow.insert(provider)
+ for key, param in default_parameters().items():
+ workflow[key] = param
+ return workflow
def Sans2dTutorialWorkflow() -> sciline.Pipeline:
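
The workflow construction above relies on `sciline.Pipeline.insert` replacing whichever provider currently produces the same output type, which is how the ISIS providers supersede the generic NeXus loading steps. A minimal sketch of that behaviour with two hypothetical providers:

```python
import sciline


def generic_loader() -> int:
    """Stands in for a generic NeXus loading step."""
    return 1


def isis_loader() -> int:
    """Produces the same output type, so inserting it replaces generic_loader."""
    return 2


workflow = sciline.Pipeline((generic_loader,))
workflow.insert(isis_loader)
assert workflow.compute(int) == 2
```
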
diff --git a/src/ess/isissans/zoom.py b/src/ess/isissans/zoom.py
index 5a72336d..59c2d0a5 100644
--- a/src/ess/isissans/zoom.py
+++ b/src/ess/isissans/zoom.py
@@ -1,11 +1,13 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
import sciline
+from ess.reduce.nexus.generic_workflow import GenericNeXusWorkflow
from ess.sans import providers as sans_providers
+from ess.sans.io import read_xml_detector_masking
from .general import default_parameters
-from .io import load_tutorial_direct_beam, load_tutorial_run, read_xml_detector_masking
+from .io import load_tutorial_direct_beam, load_tutorial_run
from .mantidio import providers as mantid_providers
@@ -25,9 +27,13 @@ def ZoomWorkflow() -> sciline.Pipeline:
set_mantid_log_level()
- params = default_parameters()
- zoom_providers = sans_providers + isis_providers + mantid_providers
- workflow = sciline.Pipeline(providers=zoom_providers, params=params)
+ # Note that the actual NeXus loading in this workflow will not be used for the
+ # ISIS files; the providers inserted below replace those steps.
+ workflow = GenericNeXusWorkflow()
+ for provider in sans_providers + isis_providers + mantid_providers:
+ workflow.insert(provider)
+ for key, param in default_parameters().items():
+ workflow[key] = param
workflow.insert(read_xml_detector_masking)
return workflow
diff --git a/src/ess/loki/__init__.py b/src/ess/loki/__init__.py
index c6d68178..775d9a11 100644
--- a/src/ess/loki/__init__.py
+++ b/src/ess/loki/__init__.py
@@ -3,7 +3,7 @@
import importlib.metadata
-from . import general, io
+from . import general
from .general import LokiAtLarmorWorkflow, default_parameters
try:
@@ -11,14 +11,10 @@
except importlib.metadata.PackageNotFoundError:
__version__ = "0.0.0"
-providers = general.providers + io.providers
-
del importlib
__all__ = [
'general',
- 'io',
- 'providers',
'default_parameters',
'LokiAtLarmorWorkflow',
]
diff --git a/src/ess/loki/general.py b/src/ess/loki/general.py
index f5840519..04557eb6 100644
--- a/src/ess/loki/general.py
+++ b/src/ess/loki/general.py
@@ -6,37 +6,27 @@
import sciline
import scipp as sc
-from ess.reduce import nexus
+from ess.reduce.nexus.generic_workflow import GenericNeXusWorkflow
from ess.sans import providers as sans_providers
+from ess.sans.io import read_xml_detector_masking
-from ..sans.common import gravity_vector
from ..sans.types import (
- BeamCenter,
- CalibratedDetector,
CorrectForGravity,
+ DetectorBankSizes,
DetectorData,
- DetectorEventData,
DetectorPixelShape,
DimsToKeep,
Incident,
LabFrameTransform,
- MonitorEventData,
+ MonitorData,
MonitorType,
NeXusDetector,
- NeXusMonitor,
NeXusMonitorName,
NonBackgroundWavelengthRange,
PixelShapePath,
- RawDetector,
- RawMonitor,
- RawMonitorData,
- RawSample,
- RawSource,
RunType,
- SamplePosition,
ScatteringRunType,
- SourcePosition,
TofData,
TofMonitor,
TransformationPath,
@@ -44,12 +34,16 @@
WavelengthBands,
WavelengthMask,
)
-from .io import dummy_load_sample
+
+DETECTOR_BANK_SIZES = {
+ 'larmor_detector': {'layer': 4, 'tube': 32, 'straw': 7, 'pixel': 512}
+}
def default_parameters() -> dict:
return {
CorrectForGravity: False,
+ DetectorBankSizes: DETECTOR_BANK_SIZES,
DimsToKeep: (),
NeXusMonitorName[Incident]: 'monitor_1',
NeXusMonitorName[Transmission]: 'monitor_2',
@@ -61,117 +55,6 @@ def default_parameters() -> dict:
}
-def LokiAtLarmorWorkflow() -> sciline.Pipeline:
- """
- Workflow with default parameters for Loki test at Larmor.
-
- This version of the Loki workflow:
-
- - Uses ISIS XML files to define masks.
- - Sets a dummy sample position [0,0,0] since files do not contain this information.
-
- Returns
- -------
- :
- Loki workflow as a sciline.Pipeline
- """
- from ess.isissans.io import read_xml_detector_masking
-
- from . import providers as loki_providers
-
- params = default_parameters()
- loki_providers = sans_providers + loki_providers
- workflow = sciline.Pipeline(providers=loki_providers, params=params)
- workflow.insert(read_xml_detector_masking)
- # No sample information in the Loki@Larmor files, so we use a dummy sample provider
- workflow.insert(dummy_load_sample)
- return workflow
-
-
-DETECTOR_BANK_RESHAPING = {
- 'larmor_detector': lambda x: x.fold(
- dim='detector_number', sizes={'layer': 4, 'tube': 32, 'straw': 7, 'pixel': 512}
- )
-}
-
-
-def get_source_position(
- raw_source: RawSource[RunType],
-) -> SourcePosition[RunType]:
- return SourcePosition[RunType](raw_source['position'])
-
-
-def get_sample_position(
- raw_sample: RawSample[RunType],
-) -> SamplePosition[RunType]:
- return SamplePosition[RunType](raw_sample['position'])
-
-
-def get_detector_data(
- detector: NeXusDetector[ScatteringRunType],
-) -> RawDetector[ScatteringRunType]:
- da = nexus.extract_detector_data(detector)
- if (reshape := DETECTOR_BANK_RESHAPING.get(detector['detector_name'])) is not None:
- da = reshape(da)
- return RawDetector[ScatteringRunType](da)
-
-
-def calibrate_detector(
- detector: RawDetector[ScatteringRunType],
- beam_center: BeamCenter,
- source_position: SourcePosition[ScatteringRunType],
- sample_position: SamplePosition[ScatteringRunType],
-) -> CalibratedDetector[ScatteringRunType]:
- return CalibratedDetector[ScatteringRunType](
- detector.assign_coords(
- position=detector.coords['position'] - beam_center,
- source_position=source_position,
- sample_position=sample_position,
- gravity=gravity_vector(),
- )
- )
-
-
-def get_monitor_data(
- monitor: NeXusMonitor[RunType, MonitorType],
- source_position: SourcePosition[RunType],
-) -> RawMonitor[RunType, MonitorType]:
- return RawMonitor[RunType, MonitorType](
- nexus.extract_monitor_data(monitor).assign_coords(
- position=monitor['position'], source_position=source_position
- )
- )
-
-
-def _add_variances(da: sc.DataArray) -> sc.DataArray:
- out = da.copy(deep=False)
- if out.bins is not None:
- content = out.bins.constituents['data']
- if content.variances is None:
- content.variances = content.values
- return out
-
-
-def assemble_detector_data(
- detector: CalibratedDetector[ScatteringRunType],
- event_data: DetectorEventData[ScatteringRunType],
-) -> DetectorData[ScatteringRunType]:
- grouped = nexus.group_event_data(
- event_data=event_data, detector_number=detector.coords['detector_number']
- )
- detector.data = grouped.data
- return DetectorData[ScatteringRunType](_add_variances(da=detector))
-
-
-def assemble_monitor_data(
- monitor_data: RawMonitor[RunType, MonitorType],
- event_data: MonitorEventData[RunType, MonitorType],
-) -> RawMonitorData[RunType, MonitorType]:
- meta = monitor_data.drop_coords('event_time_zero')
- da = event_data.assign_coords(meta.coords).assign_masks(meta.masks)
- return RawMonitorData[RunType, MonitorType](_add_variances(da=da))
-
-
def _convert_to_tof(da: sc.DataArray) -> sc.DataArray:
da.bins.coords['tof'] = da.bins.coords.pop('event_time_offset')
if 'event_time_zero' in da.dims:
@@ -186,7 +69,7 @@ def data_to_tof(
def monitor_to_tof(
- da: RawMonitorData[RunType, MonitorType],
+ da: MonitorData[RunType, MonitorType],
) -> TofMonitor[RunType, MonitorType]:
return TofMonitor[RunType, MonitorType](_convert_to_tof(da))
@@ -205,16 +88,32 @@ def detector_lab_frame_transform(
return LabFrameTransform[ScatteringRunType](detector[transform_path])
-providers = (
+loki_providers = (
detector_pixel_shape,
detector_lab_frame_transform,
- calibrate_detector,
- get_detector_data,
- get_monitor_data,
- get_sample_position,
- get_source_position,
- assemble_detector_data,
- assemble_monitor_data,
data_to_tof,
monitor_to_tof,
)
+
+
+def LokiAtLarmorWorkflow() -> sciline.Pipeline:
+ """
+ Workflow with default parameters for Loki test at Larmor.
+
+ This version of the Loki workflow:
+
+ - Uses ISIS XML files to define masks.
+ - Sets a dummy sample position [0,0,0] since files do not contain this information.
+
+ Returns
+ -------
+ :
+ Loki workflow as a sciline.Pipeline
+ """
+ workflow = GenericNeXusWorkflow()
+ for provider in sans_providers + loki_providers:
+ workflow.insert(provider)
+ for key, param in default_parameters().items():
+ workflow[key] = param
+ workflow.insert(read_xml_detector_masking)
+ return workflow
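
For context on the `DetectorBankSizes` parameter that replaces the old `DETECTOR_BANK_RESHAPING` lambda: the sizes describe how the flat `detector_number` dimension is folded into logical detector dimensions. A sketch, assuming the generic workflow performs an equivalent `fold`:

```python
import scipp as sc

sizes = {'layer': 4, 'tube': 32, 'straw': 7, 'pixel': 512}
n_pixel = 4 * 32 * 7 * 512
flat = sc.DataArray(sc.ones(dims=['detector_number'], shape=[n_pixel]))
# Equivalent of the old DETECTOR_BANK_RESHAPING lambda for 'larmor_detector'.
folded = flat.fold(dim='detector_number', sizes=sizes)
print(folded.sizes)  # {'layer': 4, 'tube': 32, 'straw': 7, 'pixel': 512}
```
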
diff --git a/src/ess/loki/io.py b/src/ess/loki/io.py
deleted file mode 100644
index f5b896cf..00000000
--- a/src/ess/loki/io.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
-"""
-Loading and merging of LoKI data.
-"""
-
-import scipp as sc
-from ess.reduce import nexus
-
-from ess.sans.types import (
- DetectorEventData,
- Filename,
- MonitorEventData,
- MonitorType,
- NeXusDetector,
- NeXusDetectorName,
- NeXusMonitor,
- NeXusMonitorName,
- RawSample,
- RawSource,
- RunType,
-)
-
-
-def load_nexus_sample(file_path: Filename[RunType]) -> RawSample[RunType]:
- return RawSample[RunType](nexus.load_sample(file_path))
-
-
-def dummy_load_sample(file_path: Filename[RunType]) -> RawSample[RunType]:
- return RawSample[RunType](
- sc.DataGroup({'position': sc.vector(value=[0, 0, 0], unit='m')})
- )
-
-
-def load_nexus_source(file_path: Filename[RunType]) -> RawSource[RunType]:
- return RawSource[RunType](nexus.load_source(file_path))
-
-
-def load_nexus_detector(
- file_path: Filename[RunType], detector_name: NeXusDetectorName
-) -> NeXusDetector[RunType]:
- # Events will be loaded later. Should we set something else as data instead, or
- # use different NeXus definitions to completely bypass the (empty) event load?
- dg = nexus.load_detector(
- file_path=file_path,
- detector_name=detector_name,
- selection={'event_time_zero': slice(0, 0)},
- )
- # The name is required later, e.g., for determining logical detector shape
- dg['detector_name'] = detector_name
- return NeXusDetector[RunType](dg)
-
-
-def load_nexus_monitor(
- file_path: Filename[RunType], monitor_name: NeXusMonitorName[MonitorType]
-) -> NeXusMonitor[RunType, MonitorType]:
- return NeXusMonitor[RunType, MonitorType](
- nexus.load_monitor(
- file_path=file_path,
- monitor_name=monitor_name,
- selection={'event_time_zero': slice(0, 0)},
- )
- )
-
-
-def load_detector_event_data(
- file_path: Filename[RunType], detector_name: NeXusDetectorName
-) -> DetectorEventData[RunType]:
- da = nexus.load_event_data(file_path=file_path, component_name=detector_name)
- return DetectorEventData[RunType](da)
-
-
-def load_monitor_event_data(
- file_path: Filename[RunType], monitor_name: NeXusMonitorName[MonitorType]
-) -> MonitorEventData[RunType, MonitorType]:
- da = nexus.load_event_data(file_path=file_path, component_name=monitor_name)
- return MonitorEventData[RunType, MonitorType](da)
-
-
-providers = (
- load_nexus_detector,
- load_nexus_monitor,
- load_nexus_sample,
- load_nexus_source,
- load_detector_event_data,
- load_monitor_event_data,
-)
-"""Providers for loading single files."""
diff --git a/src/ess/loki/workflow.py b/src/ess/loki/workflow.py
index 5a6aea64..45ee54f2 100644
--- a/src/ess/loki/workflow.py
+++ b/src/ess/loki/workflow.py
@@ -3,16 +3,9 @@
import sciline
import scipp as sc
import scippnexus as snx
-from ess.loki.general import (
- assemble_monitor_data,
- get_monitor_data,
- get_source_position,
- monitor_to_tof,
-)
-from ess.loki.io import load_nexus_monitor, load_nexus_source
+from ess import loki
from ess.reduce.nexus.json_nexus import JSONGroup
-from ess.sans.conversions import monitor_to_wavelength, sans_monitor
from ess.sans.types import (
Filename,
Incident,
@@ -74,30 +67,11 @@ def _build_pipeline(self) -> sciline.Pipeline:
"""
# Wavelength binning parameters
- wavelength_min = sc.scalar(1.0, unit="angstrom")
- wavelength_max = sc.scalar(13.0, unit="angstrom")
- n_wavelength_bins = 50
-
- providers = (
- load_nexus_monitor,
- load_nexus_source,
- get_source_position,
- get_monitor_data,
- assemble_monitor_data,
- monitor_to_tof,
- sans_monitor,
- monitor_to_wavelength,
- _hist_monitor_wavelength,
- )
-
- params = {
- NeXusMonitorName[Incident]: "monitor_1",
- NeXusMonitorName[Transmission]: "monitor_2",
- WavelengthBins: sc.linspace(
- "wavelength", wavelength_min, wavelength_max, n_wavelength_bins + 1
- ),
- }
- workflow = sciline.Pipeline(providers, params=params)
+ workflow = loki.LokiAtLarmorWorkflow()
+ workflow.insert(_hist_monitor_wavelength)
+ workflow[NeXusMonitorName[Incident]] = "monitor_1"
+ workflow[NeXusMonitorName[Transmission]] = "monitor_2"
+ workflow[WavelengthBins] = sc.linspace("wavelength", 1.0, 13.0, 50 + 1)
return workflow
def __call__(self, group: JSONGroup) -> dict[str, sc.DataArray]:
diff --git a/src/ess/sans/__init__.py b/src/ess/sans/__init__.py
index 928672d6..33d39233 100644
--- a/src/ess/sans/__init__.py
+++ b/src/ess/sans/__init__.py
@@ -33,6 +33,7 @@
*i_of_q.providers,
*masking.providers,
*normalization.providers,
+ common.beam_center_to_detector_position_offset,
)
"""
List of providers for setting up a Sciline pipeline.
diff --git a/src/ess/sans/beam_center_finder.py b/src/ess/sans/beam_center_finder.py
index 367cb8b7..26c42962 100644
--- a/src/ess/sans/beam_center_finder.py
+++ b/src/ess/sans/beam_center_finder.py
@@ -13,12 +13,13 @@
from .logging import get_logger
from .types import (
BeamCenter,
+ DetectorBankSizes,
DimsToKeep,
IofQ,
MaskedData,
+ NeXusDetector,
NormWavelengthTerm,
QBins,
- RawDetector,
ReturnEvents,
SampleRun,
WavelengthBands,
@@ -168,7 +169,9 @@ def _iofq_in_quadrants(
for i, quad in enumerate(quadrants):
# Select pixels based on phi
sel = (phi >= phi_bins[i]) & (phi < phi_bins[i + 1])
- workflow[RawDetector[SampleRun]] = detector[sel]
+ # The beam center is applied when computing CalibratedDetector, so set the
+ # quadrant *before* that step.
+ workflow[NeXusDetector[SampleRun]] = sc.DataGroup(data=detector[sel])
# MaskedData would be computed automatically, but we did it above already
workflow[MaskedData[SampleRun]] = calibrated[sel]
workflow[NormWavelengthTerm[SampleRun]] = (
@@ -358,29 +361,22 @@ def beam_center_from_iofq(
logger.info('Using tolerance: %s', tolerance)
keys = (
- RawDetector[SampleRun],
+ NeXusDetector[SampleRun],
MaskedData[SampleRun],
NormWavelengthTerm[SampleRun],
ElasticCoordTransformGraph,
)
+ workflow = workflow.copy()
+ # Avoid reshaping the detector, which would break the boolean indexing used by
+ # the cost function.
+ workflow[DetectorBankSizes] = {}
results = workflow.compute(keys)
- detector = results[RawDetector[SampleRun]]
+ detector = results[NeXusDetector[SampleRun]]['data']
data = results[MaskedData[SampleRun]]
norm = results[NormWavelengthTerm[SampleRun]]
graph = results[ElasticCoordTransformGraph]
- # Flatten positions dim which is required during the iterations for slicing with a
- # boolean mask
- pos_dims = detector.coords['position'].dims
- new_dim = uuid.uuid4().hex
- detector = detector.flatten(dims=pos_dims, to=new_dim)
- dims_to_flatten = [dim for dim in norm.dims if dim in pos_dims]
- if dims_to_flatten:
- norm = norm.flatten(dims=dims_to_flatten, to=new_dim)
-
- workflow = workflow.copy()
# Avoid reloading the detector
- workflow[RawDetector[SampleRun]] = detector
+ workflow[NeXusDetector[SampleRun]] = sc.DataGroup(data=detector)
workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound
workflow[ReturnEvents] = False
workflow[DimsToKeep] = ()
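
The `DetectorBankSizes = {}` override above keeps the detector one-dimensional so that the quadrant selection `detector[sel]` can use plain boolean indexing, replacing the explicit flatten logic that was removed. A small sketch of that indexing with toy coordinates:

```python
import scipp as sc

detector = sc.DataArray(
    sc.arange('spectrum', 6, unit='counts', dtype='float64'),
    coords={'x': sc.linspace('spectrum', -1.0, 1.0, 6, unit='m')},
)
sel = detector.coords['x'] >= sc.scalar(0.0, unit='m')
# Boolean indexing along the single 'spectrum' dim; a folded, multi-dimensional
# detector would not support this, hence DetectorBankSizes is emptied.
print(detector[sel])
```
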
diff --git a/src/ess/sans/common.py b/src/ess/sans/common.py
index debfe23e..e3de665c 100644
--- a/src/ess/sans/common.py
+++ b/src/ess/sans/common.py
@@ -6,6 +6,8 @@
import scipp as sc
from scipp.constants import g
+from .types import BeamCenter, DetectorPositionOffset, RunType
+
def gravity_vector() -> sc.Variable:
"""
@@ -89,3 +91,10 @@ def mask_range(
else:
out.masks[name] = mask_values
return out
+
+
+def beam_center_to_detector_position_offset(
+ beam_center: BeamCenter,
+) -> DetectorPositionOffset[RunType]:
+ """Convert beam center to detector position offset for all runs."""
+ return DetectorPositionOffset[RunType](-beam_center)
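
With `beam_center_to_detector_position_offset`, the beam center enters the reduction as a position offset instead of via the removed `apply_beam_center` step. Assuming the generic workflow adds `DetectorPositionOffset` to the raw pixel positions, the net effect matches the old behaviour (illustrative numbers only):

```python
import scipp as sc

beam_center = sc.vector([0.09, -0.08, 0.0], unit='m')
raw_position = sc.vector([0.25, 0.10, 4.0], unit='m')

offset = -beam_center                   # DetectorPositionOffset
calibrated = raw_position + offset      # assumed generic-workflow behaviour
old_style = raw_position - beam_center  # what apply_beam_center used to do
assert sc.identical(calibrated, old_style)
```
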
diff --git a/src/ess/sans/io.py b/src/ess/sans/io.py
index 85a5323f..1dd6d5fc 100644
--- a/src/ess/sans/io.py
+++ b/src/ess/sans/io.py
@@ -5,7 +5,14 @@
import scippnexus as snx
from scippnexus.application_definitions import nxcansas
-from .types import BackgroundSubtractedIofQ, OutFilename, RunNumber, RunTitle
+from .types import (
+ BackgroundSubtractedIofQ,
+ MaskedDetectorIDs,
+ OutFilename,
+ PixelMaskFilename,
+ RunNumber,
+ RunTitle,
+)
def save_background_subtracted_iofq(
@@ -25,3 +32,43 @@ def save_background_subtracted_iofq(
with snx.File(out_filename, 'w') as f:
f['sasentry'] = nxcansas.SASentry(title=run_title, run=run_number)
f['sasentry']['sasdata'] = nxcansas.SASdata(da, Q_variances='resolutions')
+
+
+def read_xml_detector_masking(filename: PixelMaskFilename) -> MaskedDetectorIDs:
+ """Read a pixel mask from an ISIS XML file.
+
+ The format is as follows, where the detids are inclusive ranges of detector IDs:
+
+ .. code-block:: xml
+
+        <?xml version="1.0"?>
+        <detector-masking>
+            <group>
+                <detids>1400203-1400218,1401199,1402190-1402223</detids>
+            </group>
+        </detector-masking>
+
+ Parameters
+ ----------
+ filename:
+ Path to the XML file.
+ """
+ import xml.etree.ElementTree as ET # nosec
+
+ tree = ET.parse(filename) # noqa: S314
+ root = tree.getroot()
+
+ masked_detids = []
+ for group in root.findall('group'):
+ for detids in group.findall('detids'):
+ for detid in detids.text.split(','):
+ detid = detid.strip()
+ if '-' in detid:
+ start, stop = detid.split('-')
+ masked_detids += list(range(int(start), int(stop) + 1))
+ else:
+ masked_detids.append(int(detid))
+
+ return MaskedDetectorIDs(
+ sc.array(dims=['detector_id'], values=masked_detids, unit=None, dtype='int32')
+ )
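
A quick usage sketch for the relocated `read_xml_detector_masking`; the XML content below is a made-up two-entry mask, not one of the tutorial files:

```python
import tempfile

from ess.sans.io import read_xml_detector_masking

xml = """<?xml version="1.0"?>
<detector-masking>
  <group>
    <detids>1400203-1400205,1401199</detids>
  </group>
</detector-masking>
"""
with tempfile.NamedTemporaryFile('w', suffix='.xml', delete=False) as f:
    f.write(xml)

masked = read_xml_detector_masking(f.name)
print(masked.values)  # [1400203 1400204 1400205 1401199]
```
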
diff --git a/src/ess/sans/masking.py b/src/ess/sans/masking.py
index 01388109..6cec87a9 100644
--- a/src/ess/sans/masking.py
+++ b/src/ess/sans/masking.py
@@ -8,19 +8,19 @@
import scipp as sc
from .types import (
+ CalibratedDetector,
DetectorIDs,
DetectorMasks,
MaskedData,
MaskedDetectorIDs,
PixelMaskFilename,
- RawDetector,
SampleRun,
ScatteringRunType,
TofData,
)
-def get_detector_ids_from_detector(data: RawDetector[SampleRun]) -> DetectorIDs:
+def get_detector_ids_from_detector(data: CalibratedDetector[SampleRun]) -> DetectorIDs:
"""Extract detector IDs from a detector."""
return DetectorIDs(
data.coords[
diff --git a/src/ess/sans/types.py b/src/ess/sans/types.py
index 19f5528f..0e1f45d0 100644
--- a/src/ess/sans/types.py
+++ b/src/ess/sans/types.py
@@ -11,53 +11,36 @@
import sciline
import scipp as sc
+from ess.reduce.nexus import generic_types as reduce_gt
+from ess.reduce.nexus import types as reduce_t
from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode
-UncertaintyBroadcastMode = _UncertaintyBroadcastMode
-
-# 1 TypeVars used to parametrize the generic parts of the workflow
-
-# 1.1 Run types
-BackgroundRun = NewType('BackgroundRun', int)
-"""Background run: the run with only the solvent which the sample is placed in."""
-EmptyBeamRun = NewType('EmptyBeamRun', int)
-"""Run (sometimes called 'direct run') where the sample holder was empty.
-It is used for reading the data from the transmission monitor."""
-SampleRun = NewType('SampleRun', int)
-"""Sample run: the run with the sample placed in the solvent inside the sample holder.
-"""
-
-ScatteringRunType = TypeVar(
- 'ScatteringRunType',
- SampleRun,
- BackgroundRun,
-)
-
+BackgroundRun = reduce_gt.BackgroundRun
+CalibratedDetector = reduce_gt.CalibratedDetector
+CalibratedMonitor = reduce_gt.CalibratedMonitor
+DetectorData = reduce_gt.DetectorData
+DetectorPositionOffset = reduce_gt.DetectorPositionOffset
+EmptyBeamRun = reduce_gt.EmptyBeamRun
+Filename = reduce_gt.Filename
+Incident = reduce_gt.Incident
+MonitorData = reduce_gt.MonitorData
+MonitorPositionOffset = reduce_gt.MonitorPositionOffset
+MonitorType = reduce_gt.MonitorType
+NeXusMonitorName = reduce_gt.NeXusMonitorName
+NeXusDetector = reduce_gt.NeXusDetector
+NeXusMonitor = reduce_gt.NeXusMonitor
+RunType = reduce_gt.RunType
+SampleRun = reduce_gt.SampleRun
+ScatteringRunType = reduce_gt.ScatteringRunType
+Transmission = reduce_gt.Transmission
+TransmissionRun = reduce_gt.TransmissionRun
+SamplePosition = reduce_gt.SamplePosition
+SourcePosition = reduce_gt.SourcePosition
+
+DetectorBankSizes = reduce_t.DetectorBankSizes
+NeXusDetectorName = reduce_t.NeXusDetectorName
-class TransmissionRun(sciline.Scope[ScatteringRunType, int], int):
- """Mapping between ScatteringRunType and transmission run.
- In the case where no transmission run is provided, the transmission run should be
- the same as the measurement (sample or background) run."""
-
-
-RunType = TypeVar(
- 'RunType',
- BackgroundRun,
- EmptyBeamRun,
- SampleRun,
- # Note that mypy does not seem to like this nesting, may need to find a workaround
- TransmissionRun[SampleRun],
- TransmissionRun[BackgroundRun],
-)
-"""TypeVar used for specifying BackgroundRun, EmptyBeamRun or SampleRun"""
-
-# 1.2 Monitor types
-Incident = NewType('Incident', int)
-"""Incident monitor"""
-Transmission = NewType('Transmission', int)
-"""Transmission monitor"""
-MonitorType = TypeVar('MonitorType', Incident, Transmission)
-"""TypeVar used for specifying Incident or Transmission monitor type"""
+UncertaintyBroadcastMode = _UncertaintyBroadcastMode
# 1.3 Numerator and denominator of IofQ
Numerator = NewType('Numerator', sc.DataArray)
@@ -68,9 +51,6 @@ class TransmissionRun(sciline.Scope[ScatteringRunType, int], int):
"""TypeVar used for specifying Numerator or Denominator of IofQ"""
# 1.4 Entry paths in NeXus files
-NeXusDetectorName = NewType('NeXusDetectorName', str)
-"""Name of detector entry in NeXus file"""
-
PixelShapePath = NewType('PixelShapePath', str)
"""
Name of the entry where the pixel shape is stored in the NeXus file
@@ -147,19 +127,8 @@ class TransmissionRun(sciline.Scope[ScatteringRunType, int], int):
"""Filename of the output"""
-class NeXusMonitorName(sciline.Scope[MonitorType, str], str):
- """Name of Incident|Transmission monitor in NeXus file"""
-
-
PixelMaskFilename = NewType('PixelMaskFilename', str)
-FilenameType = TypeVar('FilenameType', bound=str)
-
-
-class Filename(sciline.Scope[RunType, str], str):
- """Filename of a run"""
-
-
DetectorIDs = NewType('DetectorIDs', sc.Variable)
"""1-D variable listing all detector IDs."""
@@ -174,22 +143,6 @@ class Filename(sciline.Scope[RunType, str], str):
# 3 Workflow (intermediate) results
-class RawSource(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
- """Raw source from NeXus file"""
-
-
-class RawSample(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
- """Raw sample from NeXus file"""
-
-
-class SamplePosition(sciline.Scope[RunType, sc.Variable], sc.Variable):
- """Sample position"""
-
-
-class SourcePosition(sciline.Scope[RunType, sc.Variable], sc.Variable):
- """Source position"""
-
-
DirectBeam = NewType('DirectBeam', sc.DataArray | None)
"""Direct beam"""
@@ -221,40 +174,6 @@ class MaskedSolidAngle(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataAr
"""Same as :py:class:`SolidAngle`, but with pixel masks applied"""
-class NeXusDetector(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
- """Detector data, loaded from a NeXus file, containing not only neutron events
- but also pixel shape information, transformations, ..."""
-
-
-class NeXusMonitor(
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataGroup], sc.DataGroup
-):
- """Monitor data loaded from a NeXus file, containing not only neutron events
- but also transformations, ..."""
-
-
-class DetectorEventData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
- """Event data loaded from a detector in a NeXus file"""
-
-
-class MonitorEventData(
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
- """Event data loaded from a monitor in a NeXus file"""
-
-
-class RawDetector(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray):
- """Raw detector component extracted from :py:class:`NeXusDetector`"""
-
-
-class CalibratedDetector(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray):
- """Calibrated version of raw detector"""
-
-
-class DetectorData(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray):
- """Calibrated detector with added raw event data"""
-
-
class TofData(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray):
"""Data with a time-of-flight coordinate"""
@@ -334,20 +253,6 @@ class IofQxy(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray):
"""I(Qx, Qy) with background (given by I(Qx, Qy) of the background run) subtracted"""
-class RawMonitor(
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
- """Raw monitor data"""
-
-
-class RawMonitorData(
- sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
- """Raw monitor data where variances and necessary coordinates
- (e.g. source position) have been added, and where optionally some
- user configuration was applied to some of the coordinates."""
-
-
class WavelengthMonitor(
sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
):
diff --git a/tests/isissans/sans2d_reduction_test.py b/tests/isissans/sans2d_reduction_test.py
index 653cb5e3..09a76dc1 100644
--- a/tests/isissans/sans2d_reduction_test.py
+++ b/tests/isissans/sans2d_reduction_test.py
@@ -12,7 +12,9 @@
BackgroundRun,
BackgroundSubtractedIofQ,
BeamCenter,
+ CalibratedDetector,
CorrectForGravity,
+ DetectorData,
DimsToKeep,
DirectBeam,
DirectBeamFilename,
@@ -24,7 +26,6 @@
NeXusMonitorName,
NonBackgroundWavelengthRange,
QBins,
- RawDetector,
ReturnEvents,
SampleRun,
SolidAngle,
@@ -76,51 +77,40 @@ def make_params() -> dict:
return params
-def sans2d_providers():
- return list(
- sans.providers
- + isis.providers
- + isis.sans2d.providers
- + isis.mantidio.providers
- + (
- isis.io.load_tutorial_direct_beam,
- isis.io.load_tutorial_run,
- isis.io.transmission_from_background_run,
- isis.io.transmission_from_sample_run,
- )
- )
+@pytest.fixture()
+def pipeline():
+ wf = isis.sans2d.Sans2dTutorialWorkflow()
+ wf.insert(isis.io.transmission_from_background_run)
+ wf.insert(isis.io.transmission_from_sample_run)
+ for key, param in make_params().items():
+ wf[key] = param
+ return wf
-def test_can_create_pipeline():
- sciline.Pipeline(sans2d_providers(), params=make_params())
+def test_can_create_pipeline(pipeline):
+ pipeline.get(IofQ[SampleRun])
@pytest.mark.parametrize(
'uncertainties',
[UncertaintyBroadcastMode.drop, UncertaintyBroadcastMode.upper_bound],
)
-def test_pipeline_can_compute_background_subtracted_IofQ(uncertainties):
- params = make_params()
- params[UncertaintyBroadcastMode] = uncertainties
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+def test_pipeline_can_compute_background_subtracted_IofQ(pipeline, uncertainties):
+ pipeline[UncertaintyBroadcastMode] = uncertainties
result = pipeline.compute(BackgroundSubtractedIofQ)
assert result.dims == ('Q',)
-def test_pipeline_can_compute_background_subtracted_IofQ_in_wavelength_bands():
- params = make_params()
- params[WavelengthBands] = sc.linspace(
+def test_pipeline_can_compute_background_subtracted_IofQ_in_wavelength_bands(pipeline):
+ pipeline[WavelengthBands] = sc.linspace(
'wavelength', start=2.0, stop=16.0, num=11, unit='angstrom'
)
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
result = pipeline.compute(BackgroundSubtractedIofQ)
assert result.dims == ('band', 'Q')
assert result.sizes['band'] == 10
-def test_pipeline_wavelength_bands_is_optional():
- params = make_params()
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+def test_pipeline_wavelength_bands_is_optional(pipeline):
pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline)
noband = pipeline.compute(BackgroundSubtractedIofQ)
assert pipeline.compute(WavelengthBands) is None
@@ -131,10 +121,8 @@ def test_pipeline_wavelength_bands_is_optional():
assert sc.identical(noband, withband)
-def test_workflow_is_deterministic():
- params = make_params()
- params[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+def test_workflow_is_deterministic(pipeline):
+ pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop
pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline)
# This is Sciline's default scheduler, but we want to be explicit here
scheduler = sciline.scheduler.DaskScheduler()
@@ -144,45 +132,38 @@ def test_workflow_is_deterministic():
assert sc.identical(sc.values(result), sc.values(reference))
-def test_pipeline_raises_VariancesError_if_normalization_errors_not_dropped():
- params = make_params()
- params[NonBackgroundWavelengthRange] = (
+def test_pipeline_raises_VariancesError_if_normalization_errors_not_dropped(pipeline):
+ pipeline[NonBackgroundWavelengthRange] = (
None # Make sure we raise in iofq_denominator
)
- params[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.fail
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+ pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.fail
with pytest.raises(sc.VariancesError):
pipeline.compute(BackgroundSubtractedIofQ)
-def test_uncertainty_broadcast_mode_drop_yields_smaller_variances():
- params = make_params()
+def test_uncertainty_broadcast_mode_drop_yields_smaller_variances(pipeline):
# Errors with the full range have some NaNs or infs
- params[QBins] = sc.linspace(
+ pipeline[QBins] = sc.linspace(
dim='Q', start=0.01, stop=0.5, num=141, unit='1/angstrom'
)
- params[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+ pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop
drop = pipeline.compute(IofQ[SampleRun]).data
- params[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+ pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound
upper_bound = pipeline.compute(IofQ[SampleRun]).data
assert sc.all(sc.variances(drop) < sc.variances(upper_bound)).value
-def test_pipeline_can_visualize_background_subtracted_IofQ():
- pipeline = sciline.Pipeline(sans2d_providers(), params=make_params())
+def test_pipeline_can_visualize_background_subtracted_IofQ(pipeline):
pipeline.visualize(BackgroundSubtractedIofQ)
-def test_pipeline_can_compute_intermediate_results():
- pipeline = sciline.Pipeline(sans2d_providers(), params=make_params())
+def test_pipeline_can_compute_intermediate_results(pipeline):
result = pipeline.compute(SolidAngle[SampleRun])
assert result.dims == ('spectrum',)
def pixel_dependent_direct_beam(
- filename: DirectBeamFilename, shape: RawDetector[SampleRun]
+ filename: DirectBeamFilename, shape: CalibratedDetector[SampleRun]
) -> DirectBeam:
direct_beam = isis.io.load_tutorial_direct_beam(filename)
sizes = {'spectrum': shape.sizes['spectrum'], **direct_beam.sizes}
@@ -193,10 +174,8 @@ def pixel_dependent_direct_beam(
'uncertainties',
[UncertaintyBroadcastMode.drop, UncertaintyBroadcastMode.upper_bound],
)
-def test_pixel_dependent_direct_beam_is_supported(uncertainties):
- params = make_params()
- params[UncertaintyBroadcastMode] = uncertainties
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
+def test_pixel_dependent_direct_beam_is_supported(pipeline, uncertainties):
+ pipeline[UncertaintyBroadcastMode] = uncertainties
pipeline.insert(pixel_dependent_direct_beam)
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
result = pipeline.compute(BackgroundSubtractedIofQ)
@@ -206,10 +185,7 @@ def test_pixel_dependent_direct_beam_is_supported(uncertainties):
MANTID_BEAM_CENTER = sc.vector([0.09288, -0.08195, 0], unit='m')
-def test_beam_center_from_center_of_mass_is_close_to_verified_result():
- params = make_params()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+def test_beam_center_from_center_of_mass_is_close_to_verified_result(pipeline):
center = sans.beam_center_from_center_of_mass(pipeline)
# This is the result obtained from Mantid, using the full IofQ
# calculation. The difference is about 3 mm in X or Y, probably due to a bias
@@ -217,20 +193,13 @@ def test_beam_center_from_center_of_mass_is_close_to_verified_result():
assert sc.allclose(center, MANTID_BEAM_CENTER, atol=sc.scalar(3e-3, unit='m'))
-def test_beam_center_from_center_of_mass_independent_of_set_beam_center():
- params = make_params()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+def test_beam_center_from_center_of_mass_independent_of_set_beam_center(pipeline):
pipeline[BeamCenter] = sc.vector([0.1, -0.1, 0], unit='m')
center = sans.beam_center_from_center_of_mass(pipeline)
assert sc.allclose(center, MANTID_BEAM_CENTER, atol=sc.scalar(3e-3, unit='m'))
-def test_beam_center_finder_without_direct_beam_reproduces_verified_result():
- params = make_params()
- del params[DirectBeamFilename]
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+def test_beam_center_finder_without_direct_beam_reproduces_verified_result(pipeline):
pipeline[DirectBeam] = None
center = sans.beam_center_finder.beam_center_from_iofq(
workflow=pipeline, q_bins=sc.linspace('Q', 0.02, 0.3, 71, unit='1/angstrom')
@@ -238,20 +207,15 @@ def test_beam_center_finder_without_direct_beam_reproduces_verified_result():
assert sc.allclose(center, MANTID_BEAM_CENTER, atol=sc.scalar(2e-3, unit='m'))
-def test_beam_center_can_get_closer_to_verified_result_with_low_counts_mask():
+def test_beam_center_can_get_closer_to_verified_result_with_low_counts_mask(pipeline):
def low_counts_mask(
- sample: RawDetector[SampleRun],
+ sample: DetectorData[SampleRun],
low_counts_threshold: sans2d.LowCountThreshold,
) -> sans2d.SampleHolderMask:
return sans2d.SampleHolderMask(sample.hist().data < low_counts_threshold)
- params = make_params()
- params[sans2d.LowCountThreshold] = sc.scalar(80.0, unit='counts')
- del params[DirectBeamFilename]
- providers = sans2d_providers()
- providers.remove(sans2d.sample_holder_mask)
- providers.append(low_counts_mask)
- pipeline = sciline.Pipeline(providers, params=params)
+ pipeline[sans2d.LowCountThreshold] = sc.scalar(80.0, unit='counts')
+ pipeline.insert(low_counts_mask) # replaces sans2d.sample_holder_mask
pipeline[DirectBeam] = None
q_bins = sc.linspace('Q', 0.02, 0.3, 71, unit='1/angstrom')
center = sans.beam_center_finder.beam_center_from_iofq(
@@ -260,10 +224,7 @@ def low_counts_mask(
assert sc.allclose(center, MANTID_BEAM_CENTER, atol=sc.scalar(5e-4, unit='m'))
-def test_beam_center_finder_works_with_direct_beam():
- params = make_params()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+def test_beam_center_finder_works_with_direct_beam(pipeline):
q_bins = sc.linspace('Q', 0.02, 0.3, 71, unit='1/angstrom')
center_with_direct_beam = sans.beam_center_finder.beam_center_from_iofq(
workflow=pipeline, q_bins=q_bins
@@ -273,10 +234,7 @@ def test_beam_center_finder_works_with_direct_beam():
)
-def test_beam_center_finder_independent_of_set_beam_center():
- params = make_params()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+def test_beam_center_finder_independent_of_set_beam_center(pipeline):
pipeline[BeamCenter] = sc.vector([0.1, -0.1, 0], unit='m')
q_bins = sc.linspace('Q', 0.02, 0.3, 71, unit='1/angstrom')
center_with_direct_beam = sans.beam_center_finder.beam_center_from_iofq(
@@ -287,11 +245,8 @@ def test_beam_center_finder_independent_of_set_beam_center():
)
-def test_beam_center_finder_works_with_pixel_dependent_direct_beam():
+def test_beam_center_finder_works_with_pixel_dependent_direct_beam(pipeline):
q_bins = sc.linspace('Q', 0.02, 0.3, 71, unit='1/angstrom')
- params = make_params()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
center_pixel_independent_direct_beam = (
sans.beam_center_finder.beam_center_from_iofq(workflow=pipeline, q_bins=q_bins)
)
@@ -304,8 +259,6 @@ def test_beam_center_finder_works_with_pixel_dependent_direct_beam():
}
).copy()
- providers = sans2d_providers()
- pipeline = sciline.Pipeline(providers, params=params)
pipeline[DirectBeam] = pixel_dependent_direct_beam
center = sans.beam_center_finder.beam_center_from_iofq(
@@ -314,13 +267,11 @@ def test_beam_center_finder_works_with_pixel_dependent_direct_beam():
assert sc.identical(center, center_pixel_independent_direct_beam)
-def test_workflow_runs_without_gravity_if_beam_center_is_provided():
- params = make_params()
- params[CorrectForGravity] = False
- pipeline = sciline.Pipeline(sans2d_providers(), params=params)
- da = pipeline.compute(RawDetector[SampleRun])
+def test_workflow_runs_without_gravity_if_beam_center_is_provided(pipeline):
+ pipeline[CorrectForGravity] = False
+ da = pipeline.compute(DetectorData[SampleRun])
del da.coords['gravity']
- pipeline[RawDetector[SampleRun]] = da
+ pipeline[DetectorData[SampleRun]] = da
pipeline[BeamCenter] = MANTID_BEAM_CENTER
result = pipeline.compute(BackgroundSubtractedIofQ)
assert result.dims == ('Q',)
diff --git a/tests/isissans/zoom_reduction_test.py b/tests/isissans/zoom_reduction_test.py
index d25dd3d8..7a2ffb95 100644
--- a/tests/isissans/zoom_reduction_test.py
+++ b/tests/isissans/zoom_reduction_test.py
@@ -1,6 +1,7 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
-import sciline
+import ess.isissans.data # noqa: F401
+import pytest
import scipp as sc
from ess import isissans as isis
from ess import sans
@@ -8,6 +9,7 @@
from ess.sans.types import (
BeamCenter,
CorrectForGravity,
+ DetectorPositionOffset,
Filename,
Incident,
IofQ,
@@ -31,8 +33,8 @@ def make_params() -> dict:
isis.CalibrationFilename: isis.data.zoom_tutorial_calibration(),
Filename[sans.types.SampleRun]: isis.data.zoom_tutorial_sample_run(),
Filename[sans.types.EmptyBeamRun]: isis.data.zoom_tutorial_empty_beam_run(),
- isis.SampleOffset: sc.vector([0.0, 0.0, 0.11], unit='m'),
- isis.DetectorBankOffset: sc.vector([0.0, 0.0, 0.5], unit='m'),
+ isis.general.SampleOffset: sc.vector([0.0, 0.0, 0.11], unit='m'),
+ DetectorPositionOffset[SampleRun]: sc.vector([0.0, 0.0, 0.5], unit='m'),
}
params[NeXusMonitorName[Incident]] = 'monitor3'
@@ -55,22 +57,17 @@ def make_params() -> dict:
return params
-def zoom_providers():
- return list(
- sans.providers
- + isis.providers
- + isis.mantidio.providers
- + (
- isis.io.load_tutorial_direct_beam,
- isis.io.load_tutorial_run,
- isis.io.transmission_from_background_run,
- isis.io.transmission_from_sample_run,
- )
- )
+@pytest.fixture()
+def pipeline():
+ wf = isis.zoom.ZoomTutorialWorkflow()
+ wf.insert(isis.io.transmission_from_background_run)
+ wf.insert(isis.io.transmission_from_sample_run)
+ for key, param in make_params().items():
+ wf[key] = param
+ return wf
-def test_can_create_pipeline():
- pipeline = sciline.Pipeline(zoom_providers(), params=make_params())
+def test_can_create_pipeline(pipeline):
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
pipeline = sans.with_pixel_mask_filenames(
pipeline, isis.data.zoom_tutorial_mask_filenames()
@@ -78,8 +75,7 @@ def test_can_create_pipeline():
pipeline.get(IofQ[SampleRun])
-def test_pipeline_can_compute_IofQ():
- pipeline = sciline.Pipeline(zoom_providers(), params=make_params())
+def test_pipeline_can_compute_IofQ(pipeline):
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
pipeline = sans.with_pixel_mask_filenames(
pipeline, isis.data.zoom_tutorial_mask_filenames()
@@ -88,8 +84,7 @@ def test_pipeline_can_compute_IofQ():
assert result.dims == ('Q',)
-def test_pipeline_can_compute_IofQxQy():
- pipeline = sciline.Pipeline(zoom_providers(), params=make_params())
+def test_pipeline_can_compute_IofQxQy(pipeline):
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
pipeline = sans.with_pixel_mask_filenames(
pipeline, isis.data.zoom_tutorial_mask_filenames()
diff --git a/tests/loki/common.py b/tests/loki/common.py
index fa13d742..90f02e3b 100644
--- a/tests/loki/common.py
+++ b/tests/loki/common.py
@@ -1,9 +1,8 @@
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
-from collections.abc import Callable
-
+import sciline
import scipp as sc
-from ess import loki, sans
+from ess import loki
from ess.sans.types import (
BackgroundRun,
@@ -24,46 +23,31 @@
)
-def make_params(no_masks: bool = True) -> dict:
- params = loki.default_parameters()
+def make_workflow(no_masks: bool = True) -> sciline.Pipeline:
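+    """Return the LoKI tutorial workflow configured with tutorial runs and parameters."""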
+ wf = loki.LokiAtLarmorWorkflow()
- params[NeXusDetectorName] = 'larmor_detector'
- params[Filename[SampleRun]] = loki.data.loki_tutorial_sample_run_60339()
- params[Filename[BackgroundRun]] = loki.data.loki_tutorial_background_run_60393()
- params[Filename[TransmissionRun[SampleRun]]] = (
+ wf[NeXusDetectorName] = 'larmor_detector'
+ wf[Filename[SampleRun]] = loki.data.loki_tutorial_sample_run_60339()
+ wf[Filename[BackgroundRun]] = loki.data.loki_tutorial_background_run_60393()
+ wf[Filename[TransmissionRun[SampleRun]]] = (
loki.data.loki_tutorial_sample_transmission_run()
)
- params[Filename[TransmissionRun[BackgroundRun]]] = (
- loki.data.loki_tutorial_run_60392()
- )
- params[Filename[EmptyBeamRun]] = loki.data.loki_tutorial_run_60392()
+ wf[Filename[TransmissionRun[BackgroundRun]]] = loki.data.loki_tutorial_run_60392()
+ wf[Filename[EmptyBeamRun]] = loki.data.loki_tutorial_run_60392()
- params[WavelengthBins] = sc.linspace(
+ wf[WavelengthBins] = sc.linspace(
'wavelength', start=1.0, stop=13.0, num=51, unit='angstrom'
)
- params[CorrectForGravity] = True
- params[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound
- params[ReturnEvents] = False
+ wf[CorrectForGravity] = True
+ wf[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound
+ wf[ReturnEvents] = False
- params[QxBins] = sc.linspace('Qx', start=-0.3, stop=0.3, num=91, unit='1/angstrom')
- params[QyBins] = sc.linspace('Qy', start=-0.2, stop=0.3, num=78, unit='1/angstrom')
- params[QBins] = sc.linspace('Q', start=0.01, stop=0.3, num=101, unit='1/angstrom')
+ wf[QxBins] = sc.linspace('Qx', start=-0.3, stop=0.3, num=91, unit='1/angstrom')
+ wf[QyBins] = sc.linspace('Qy', start=-0.2, stop=0.3, num=78, unit='1/angstrom')
+ wf[QBins] = sc.linspace('Q', start=0.01, stop=0.3, num=101, unit='1/angstrom')
# We have no direct-beam file for Loki currently
- params[DirectBeam] = None
+ wf[DirectBeam] = None
if no_masks:
- params[DetectorMasks] = {}
-
- return params
+ wf[DetectorMasks] = {}
-
-def loki_providers() -> list[Callable]:
- from ess.isissans.io import read_xml_detector_masking
-
- return list(
- sans.providers
- + loki.providers
- + (
- read_xml_detector_masking,
- loki.io.dummy_load_sample,
- )
- )
+ return wf
diff --git a/tests/loki/directbeam_test.py b/tests/loki/directbeam_test.py
index bd162581..4dbf9e75 100644
--- a/tests/loki/directbeam_test.py
+++ b/tests/loki/directbeam_test.py
@@ -3,7 +3,6 @@
import sys
from pathlib import Path
-import sciline
import scipp as sc
from ess import loki, sans
from scipp.scipy.interpolate import interp1d
@@ -17,7 +16,7 @@
)
sys.path.insert(0, str(Path(__file__).resolve().parent))
-from common import loki_providers, make_params
+from common import make_workflow
def _get_I0(qbins: sc.Variable) -> sc.Variable:
@@ -28,17 +27,13 @@ def _get_I0(qbins: sc.Variable) -> sc.Variable:
def test_can_compute_direct_beam_for_all_pixels():
n_wavelength_bands = 10
- params = make_params()
- params[WavelengthBands] = sc.linspace(
- 'wavelength',
- params[WavelengthBins].min(),
- params[WavelengthBins].max(),
- n_wavelength_bands + 1,
+ pipeline = make_workflow()
+ edges = pipeline.compute(WavelengthBins)
+ pipeline[WavelengthBands] = sc.linspace(
+ 'wavelength', edges.min(), edges.max(), n_wavelength_bands + 1
)
- providers = loki_providers()
- pipeline = sciline.Pipeline(providers, params=params)
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
- I0 = _get_I0(qbins=params[QBins])
+ I0 = _get_I0(qbins=pipeline.compute(QBins))
results = sans.direct_beam(workflow=pipeline, I0=I0, niter=4)
iofq_full = results[-1]['iofq_full']
@@ -53,22 +48,16 @@ def test_can_compute_direct_beam_for_all_pixels():
def test_can_compute_direct_beam_with_overlapping_wavelength_bands():
n_wavelength_bands = 10
- params = make_params()
# Bands have double the width
- edges = sc.linspace(
- 'band',
- params[WavelengthBins].min(),
- params[WavelengthBins].max(),
- n_wavelength_bands + 2,
- )
- params[WavelengthBands] = sc.concat(
+ pipeline = make_workflow()
+ edges = pipeline.compute(WavelengthBins)
+ edges = sc.linspace('band', edges.min(), edges.max(), n_wavelength_bands + 2)
+ pipeline[WavelengthBands] = sc.concat(
[edges[:-2], edges[2::]], dim='wavelength'
).transpose()
- providers = loki_providers()
- pipeline = sciline.Pipeline(providers, params=params)
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
- I0 = _get_I0(qbins=params[QBins])
+ I0 = _get_I0(qbins=pipeline.compute(QBins))
results = sans.direct_beam(workflow=pipeline, I0=I0, niter=4)
iofq_full = results[-1]['iofq_full']
@@ -83,18 +72,14 @@ def test_can_compute_direct_beam_with_overlapping_wavelength_bands():
def test_can_compute_direct_beam_per_layer():
n_wavelength_bands = 10
- params = make_params()
- params[WavelengthBands] = sc.linspace(
- 'wavelength',
- params[WavelengthBins].min(),
- params[WavelengthBins].max(),
- n_wavelength_bands + 1,
+ pipeline = make_workflow()
+ edges = pipeline.compute(WavelengthBins)
+ pipeline[WavelengthBands] = sc.linspace(
+ 'wavelength', edges.min(), edges.max(), n_wavelength_bands + 1
)
- params[DimsToKeep] = ['layer']
- providers = loki_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+ pipeline[DimsToKeep] = ['layer']
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
- I0 = _get_I0(qbins=params[QBins])
+ I0 = _get_I0(qbins=pipeline.compute(QBins))
results = sans.direct_beam(workflow=pipeline, I0=I0, niter=4)
iofq_full = results[-1]['iofq_full']
@@ -111,18 +96,14 @@ def test_can_compute_direct_beam_per_layer():
def test_can_compute_direct_beam_per_layer_and_straw():
n_wavelength_bands = 10
- params = make_params()
- params[WavelengthBands] = sc.linspace(
- 'wavelength',
- params[WavelengthBins].min(),
- params[WavelengthBins].max(),
- n_wavelength_bands + 1,
+ pipeline = make_workflow()
+ edges = pipeline.compute(WavelengthBins)
+ pipeline[WavelengthBands] = sc.linspace(
+ 'wavelength', edges.min(), edges.max(), n_wavelength_bands + 1
)
- params[DimsToKeep] = ('layer', 'straw')
- providers = loki_providers()
- pipeline = sciline.Pipeline(providers, params=params)
+ pipeline[DimsToKeep] = ('layer', 'straw')
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
- I0 = _get_I0(qbins=params[QBins])
+ I0 = _get_I0(qbins=pipeline.compute(QBins))
results = sans.direct_beam(workflow=pipeline, I0=I0, niter=4)
iofq_full = results[-1]['iofq_full']
diff --git a/tests/loki/iofq_test.py b/tests/loki/iofq_test.py
index 86a798ea..2bb5a652 100644
--- a/tests/loki/iofq_test.py
+++ b/tests/loki/iofq_test.py
@@ -24,7 +24,6 @@
IofQ,
IofQxy,
MaskedData,
- NeXusDetectorName,
Numerator,
QBins,
QxBins,
@@ -37,20 +36,17 @@
)
sys.path.insert(0, str(Path(__file__).resolve().parent))
-from common import (
- loki_providers,
- make_params,
-)
+from common import make_workflow
def test_can_create_pipeline():
- pipeline = sciline.Pipeline(loki_providers(), params=make_params())
+ pipeline = make_workflow()
pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m')
pipeline.get(BackgroundSubtractedIofQ)
def test_can_create_pipeline_with_pixel_masks():
- pipeline = sciline.Pipeline(loki_providers(), params=make_params(no_masks=False))
+ pipeline = make_workflow(no_masks=False)
pipeline = sans.with_pixel_mask_filenames(
pipeline, loki.data.loki_tutorial_mask_filenames()
)
@@ -64,9 +60,8 @@ def test_can_create_pipeline_with_pixel_masks():
)
@pytest.mark.parametrize('qxy', [False, True])
def test_pipeline_can_compute_IofQ(uncertainties, qxy: bool):
- params = make_params(no_masks=False)
- params[UncertaintyBroadcastMode] = uncertainties
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow(no_masks=False)
+ pipeline[UncertaintyBroadcastMode] = uncertainties
pipeline = sans.with_pixel_mask_filenames(
pipeline, loki.data.loki_tutorial_mask_filenames()
)
@@ -74,14 +69,14 @@ def test_pipeline_can_compute_IofQ(uncertainties, qxy: bool):
if qxy:
result = pipeline.compute(BackgroundSubtractedIofQxy)
assert result.dims == ('Qy', 'Qx')
- assert sc.identical(result.coords['Qx'], params[QxBins])
- assert sc.identical(result.coords['Qy'], params[QyBins])
+ assert sc.identical(result.coords['Qx'], pipeline.compute(QxBins))
+ assert sc.identical(result.coords['Qy'], pipeline.compute(QyBins))
assert result.sizes['Qx'] == 90
assert result.sizes['Qy'] == 77
else:
result = pipeline.compute(BackgroundSubtractedIofQ)
assert result.dims == ('Q',)
- assert sc.identical(result.coords['Q'], params[QBins])
+ assert sc.identical(result.coords['Q'], pipeline.compute(QBins))
assert result.sizes['Q'] == 100
if uncertainties == UncertaintyBroadcastMode.drop:
test_dir = os.path.dirname(os.path.abspath(__file__))
@@ -104,9 +99,8 @@ def test_pipeline_can_compute_IofQ(uncertainties, qxy: bool):
],
)
def test_pipeline_can_compute_IofQ_in_event_mode(uncertainties, target):
- params = make_params()
- params[UncertaintyBroadcastMode] = uncertainties
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
+ pipeline[UncertaintyBroadcastMode] = uncertainties
pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline)
reference = pipeline.compute(target)
pipeline[ReturnEvents] = True
@@ -138,14 +132,13 @@ def test_pipeline_can_compute_IofQ_in_event_mode(uncertainties, target):
@pytest.mark.parametrize('qxy', [False, True])
def test_pipeline_can_compute_IofQ_in_wavelength_bands(qxy: bool):
- params = make_params()
- params[WavelengthBands] = sc.linspace(
+ pipeline = make_workflow()
+ pipeline[WavelengthBands] = sc.linspace(
'wavelength',
- params[WavelengthBins].min(),
- params[WavelengthBins].max(),
+ pipeline.compute(WavelengthBins).min(),
+ pipeline.compute(WavelengthBins).max(),
11,
)
- pipeline = sciline.Pipeline(loki_providers(), params=params)
pipeline[BeamCenter] = _compute_beam_center()
result = pipeline.compute(
BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ
@@ -156,15 +149,13 @@ def test_pipeline_can_compute_IofQ_in_wavelength_bands(qxy: bool):
@pytest.mark.parametrize('qxy', [False, True])
def test_pipeline_can_compute_IofQ_in_overlapping_wavelength_bands(qxy: bool):
- params = make_params()
+ pipeline = make_workflow()
# Bands have double the width
- edges = sc.linspace(
- 'band', params[WavelengthBins].min(), params[WavelengthBins].max(), 12
- )
- params[WavelengthBands] = sc.concat(
+ edges = pipeline.compute(WavelengthBins)
+ edges = sc.linspace('band', edges.min(), edges.max(), 12)
+ pipeline[WavelengthBands] = sc.concat(
[edges[:-2], edges[2::]], dim='wavelength'
).transpose()
- pipeline = sciline.Pipeline(loki_providers(), params=params)
pipeline[BeamCenter] = _compute_beam_center()
result = pipeline.compute(
BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ
@@ -175,9 +166,8 @@ def test_pipeline_can_compute_IofQ_in_overlapping_wavelength_bands(qxy: bool):
@pytest.mark.parametrize('qxy', [False, True])
def test_pipeline_can_compute_IofQ_in_layers(qxy: bool):
- params = make_params()
- params[DimsToKeep] = ['layer']
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
+ pipeline[DimsToKeep] = ['layer']
pipeline[BeamCenter] = _compute_beam_center()
result = pipeline.compute(
BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ
@@ -187,13 +177,10 @@ def test_pipeline_can_compute_IofQ_in_layers(qxy: bool):
def _compute_beam_center():
- pipeline = sciline.Pipeline(loki_providers(), params=make_params())
- return sans.beam_center_from_center_of_mass(pipeline)
+ return sans.beam_center_from_center_of_mass(make_workflow())
def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs():
- params = make_params()
-
sample_runs = [
loki.data.loki_tutorial_sample_run_60250(),
loki.data.loki_tutorial_sample_run_60339(),
@@ -202,7 +189,7 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs():
loki.data.loki_tutorial_background_run_60248(),
loki.data.loki_tutorial_background_run_60393(),
]
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
pipeline[BeamCenter] = _compute_beam_center()
pipeline = sans.with_sample_runs(pipeline, runs=sample_runs)
@@ -213,10 +200,7 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs():
def test_pipeline_can_compute_IofQ_by_bank():
- params = make_params()
- del params[NeXusDetectorName]
-
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
pipeline[BeamCenter] = _compute_beam_center()
pipeline = sans.with_banks(pipeline, banks=['larmor_detector'])
@@ -225,7 +209,6 @@ def test_pipeline_can_compute_IofQ_by_bank():
def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank():
- params = make_params()
sample_runs = [
loki.data.loki_tutorial_sample_run_60250(),
loki.data.loki_tutorial_sample_run_60339(),
@@ -234,7 +217,7 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank():
loki.data.loki_tutorial_background_run_60248(),
loki.data.loki_tutorial_background_run_60393(),
]
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
pipeline[BeamCenter] = _compute_beam_center()
pipeline = sans.with_sample_runs(pipeline, runs=sample_runs)
@@ -253,9 +236,8 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank():
def test_pipeline_IofQ_merging_events_yields_consistent_results():
N = 3
- params = make_params()
center = _compute_beam_center()
- pipeline_single = sciline.Pipeline(loki_providers(), params=params)
+ pipeline_single = make_workflow()
pipeline_single[BeamCenter] = center
sample_runs = [loki.data.loki_tutorial_sample_run_60339()] * N
@@ -285,8 +267,7 @@ def test_pipeline_IofQ_merging_events_yields_consistent_results():
def test_beam_center_from_center_of_mass_is_close_to_verified_result():
- params = make_params(no_masks=False)
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow(no_masks=False)
pipeline = sans.with_pixel_mask_filenames(
pipeline, loki.data.loki_tutorial_mask_filenames()
)
@@ -296,8 +277,7 @@ def test_beam_center_from_center_of_mass_is_close_to_verified_result():
def test_phi_with_gravity():
- params = make_params()
- pipeline = sciline.Pipeline(loki_providers(), params=params)
+ pipeline = make_workflow()
pipeline[BeamCenter] = _compute_beam_center()
pipeline[CorrectForGravity] = False
data_no_grav = pipeline.compute(