From 4698c0246d461b8dbb9ccd10dd037f5b477ab7d4 Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 09:39:27 +0200
Subject: [PATCH 1/8] Add dream.io.nexus.LoadNeXusWorkflow

---
 src/ess/dream/__init__.py    |  3 --
 src/ess/dream/io/nexus.py    | 19 +++++++------
 tests/dream/io/nexus_test.py | 54 +++++++++++++++---------------------
 3 files changed, 32 insertions(+), 44 deletions(-)

diff --git a/src/ess/dream/__init__.py b/src/ess/dream/__init__.py
index d003b17f..a6a83504 100644
--- a/src/ess/dream/__init__.py
+++ b/src/ess/dream/__init__.py
@@ -18,9 +18,6 @@
 del importlib
 
 
-providers = (*nexus.providers,)
-
-
 __all__ = [
     'DreamGeant4Workflow',
     'default_parameters',
diff --git a/src/ess/dream/io/nexus.py b/src/ess/dream/io/nexus.py
index 3e9755a3..b0bb884c 100644
--- a/src/ess/dream/io/nexus.py
+++ b/src/ess/dream/io/nexus.py
@@ -13,7 +13,9 @@
 but it is not possible to reshape the data into all the logical dimensions.
 """
 
-from ess import powder
+import sciline
+from ess.reduce.nexus.generic_workflow import GenericNeXusWorkflow
+from ess.reduce.nexus.types import DetectorBankSizes
 
 DETECTOR_BANK_SIZES = {
     "endcap_backward_detector": {
@@ -45,11 +47,10 @@
 }
 
 
-def dream_detector_bank_sizes() -> powder.types.DetectorBankSizes | None:
-    return powder.types.DetectorBankSizes(DETECTOR_BANK_SIZES)
-
-
-providers = (*powder.nexus.providers, dream_detector_bank_sizes)
-"""
-Providers for loading and processing NeXus data.
-"""
+def LoadNeXusWorkflow() -> sciline.Pipeline:
+    """
+    Workflow for loading NeXus data.
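+
+    A usage sketch (the filename is only a placeholder; ``Filename``,
+    ``SampleRun``, and ``DetectorData`` come from
+    ``ess.reduce.nexus.generic_types``, as in the tests below):
+
+        wf = LoadNeXusWorkflow()
+        wf[Filename[SampleRun]] = "dream_sample.nxs"
+        wf[NeXusDetectorName] = "mantle"
+        data = wf.compute(DetectorData[SampleRun])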
+    """
+    wf = GenericNeXusWorkflow()
+    wf[DetectorBankSizes] = DETECTOR_BANK_SIZES
+    return wf
diff --git a/tests/dream/io/nexus_test.py b/tests/dream/io/nexus_test.py
index 7b28e4f1..6b0e6c70 100644
--- a/tests/dream/io/nexus_test.py
+++ b/tests/dream/io/nexus_test.py
@@ -3,28 +3,27 @@
 import pytest
 import sciline
 import scipp as sc
-from ess import dream, powder
-
-import ess.dream.data  # noqa: F401
-from ess.dream import nexus
-from ess.powder.types import (
+from ess import dream
+from ess.reduce.nexus.generic_types import (
+    CalibratedDetector,
+    CalibratedMonitor,
+    DetectorData,
     Filename,
     Monitor1,
-    NeXusDetectorName,
     NeXusMonitorName,
-    RawDetector,
-    RawMonitor,
-    ReducibleDetectorData,
     SampleRun,
 )
+from ess.reduce.nexus.types import NeXusDetectorName
+
+import ess.dream.data  # noqa: F401
 
 bank_dims = {'wire', 'module', 'segment', 'strip', 'counter'}
 hr_sans_dims = {'strip', 'other'}
 
 
 @pytest.fixture()
-def providers():
-    return (*nexus.providers, powder.nexus.dummy_load_sample)
+def nexus_workflow() -> sciline.Pipeline:
+    return dream.io.nexus.LoadNeXusWorkflow()
 
 
 @pytest.fixture(
@@ -44,9 +43,10 @@ def params(request):
     return params
 
 
-def test_can_load_nexus_detector_data(providers, params):
-    pipeline = sciline.Pipeline(params=params, providers=providers)
-    result = pipeline.compute(RawDetector[SampleRun])
+def test_can_load_nexus_detector_data(nexus_workflow, params):
+    for key, value in params.items():
+        nexus_workflow[key] = value
+    result = nexus_workflow.compute(CalibratedDetector[SampleRun])
     assert (
         set(result.dims) == hr_sans_dims
         if params[NeXusDetectorName]
@@ -60,29 +60,19 @@
     assert sc.identical(result.data, result.coords['detector_number'])
 
 
-def test_can_load_nexus_monitor_data(providers):
-    pipeline = sciline.Pipeline(providers=providers)
-    pipeline[Filename[SampleRun]] = dream.data.get_path(
+def test_can_load_nexus_monitor_data(nexus_workflow):
+    nexus_workflow[Filename[SampleRun]] = dream.data.get_path(
         'DREAM_nexus_sorted-2023-12-07.nxs'
     )
-    pipeline[NeXusMonitorName[Monitor1]] = 'monitor_cave'
-    result = pipeline.compute(RawMonitor[SampleRun, Monitor1])
+    nexus_workflow[NeXusMonitorName[Monitor1]] = 'monitor_cave'
+    result = nexus_workflow.compute(CalibratedMonitor[SampleRun, Monitor1])
     assert result.sizes == {'event_time_zero': 0}
 
 
-def test_load_fails_with_bad_detector_name(providers):
-    params = {
-        Filename[SampleRun]: dream.data.get_path('DREAM_nexus_sorted-2023-12-07.nxs'),
-        NeXusDetectorName: 'bad_detector',
-    }
-    pipeline = sciline.Pipeline(params=params, providers=providers)
-    with pytest.raises(KeyError, match='bad_detector'):
-        pipeline.compute(RawDetector[SampleRun])
-
-
-def test_assemble_nexus_detector_data(providers, params):
-    pipeline = sciline.Pipeline(params=params, providers=providers)
-    result = pipeline.compute(ReducibleDetectorData[SampleRun])
+def test_assemble_nexus_detector_data(nexus_workflow, params):
+    for key, value in params.items():
+        nexus_workflow[key] = value
+    result = nexus_workflow.compute(DetectorData[SampleRun])
     assert (
         set(result.dims) == hr_sans_dims
         if params[NeXusDetectorName]

From fc4a878d69cca83b3d75a6343b33a69555ef694f Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 10:04:03 +0200
Subject: [PATCH 2/8] Rename run types to be consistent with ESSsans

---
 .../dream/dream-data-reduction.ipynb |  2 +-
 src/ess/powder/types.py              | 34 +++++++++++++++++--
 tests/dream/geant4_reduction_test.py |  4 +--
 3 files changed, 34 insertions(+), 6 deletions(-)

diff --git a/docs/user-guide/dream/dream-data-reduction.ipynb b/docs/user-guide/dream/dream-data-reduction.ipynb
index f91fb0fc..00a6e3d4 100644
--- a/docs/user-guide/dream/dream-data-reduction.ipynb
+++ b/docs/user-guide/dream/dream-data-reduction.ipynb
@@ -66,7 +66,7 @@
    "source": [
     "workflow[Filename[SampleRun]] = dream.data.simulated_diamond_sample()\n",
     "workflow[Filename[VanadiumRun]] = dream.data.simulated_vanadium_sample()\n",
-    "workflow[Filename[EmptyCanRun]] = dream.data.simulated_empty_can()\n",
+    "workflow[Filename[BackgroundRun]] = dream.data.simulated_empty_can()\n",
     "workflow[CalibrationFilename] = None\n",
     "workflow[NeXusDetectorName] = \"mantle\"\n",
     "# The upper bounds mode is not yet implemented.\n",
diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py
index 5697475d..fa30aa71 100644
--- a/src/ess/powder/types.py
+++ b/src/ess/powder/types.py
@@ -12,20 +12,48 @@
 import sciline
 import scipp as sc
+from ess.reduce.nexus import generic_types as reduce_gt
+from ess.reduce.nexus import types as reduce_t
 from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode
 
+BackgroundRun = reduce_gt.BackgroundRun
+CalibratedDetector = reduce_gt.CalibratedDetector
+CalibratedMonitor = reduce_gt.CalibratedMonitor
+DetectorData = reduce_gt.DetectorData
+DetectorPositionOffset = reduce_gt.DetectorPositionOffset
+EmptyBeamRun = reduce_gt.EmptyBeamRun
+Filename = reduce_gt.Filename
+Incident = reduce_gt.Incident
+MonitorData = reduce_gt.MonitorData
+MonitorPositionOffset = reduce_gt.MonitorPositionOffset
+MonitorType = reduce_gt.MonitorType
+NeXusMonitorName = reduce_gt.NeXusMonitorName
+NeXusDetector = reduce_gt.NeXusDetector
+NeXusMonitor = reduce_gt.NeXusMonitor
+RunType = reduce_gt.RunType
+SampleRun = reduce_gt.SampleRun
+ScatteringRunType = reduce_gt.ScatteringRunType
+Transmission = reduce_gt.Transmission
+TransmissionRun = reduce_gt.TransmissionRun
+SamplePosition = reduce_gt.SamplePosition
+SourcePosition = reduce_gt.SourcePosition
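+
+# The aliases above re-export the ESSreduce types so that downstream code can
+# keep importing everything from ess.powder.types, e.g. (a sketch only; the
+# filename is a placeholder):
+#
+#     from ess.powder.types import Filename, SampleRun
+#     workflow[Filename[SampleRun]] = 'sample.nxs'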
+DetectorBankSizes = reduce_t.DetectorBankSizes
+NeXusDetectorName = reduce_t.NeXusDetectorName
+
+
 # 1 TypeVars used to parametrize the generic parts of the workflow
 
 # 1.1 Run types
-EmptyCanRun = NewType("EmptyCanRun", int)
+BackgroundRun = NewType("BackgroundRun", int)
 """Empty sample can run."""
-EmptyInstrumentRun = NewType("EmptyInstrumentRun", int)
+EmptyBeamRun = NewType("EmptyBeamRun", int)
 """Empty instrument run."""
 SampleRun = NewType("SampleRun", int)
 """Sample run."""
 VanadiumRun = NewType("VanadiumRun", int)
 """Vanadium run."""
-RunType = TypeVar("RunType", EmptyInstrumentRun, SampleRun, VanadiumRun)
+RunType = TypeVar("RunType", EmptyBeamRun, SampleRun, VanadiumRun)
 """TypeVar used for specifying the run."""
 
 # 1.2 Monitor types
diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py
index 9d04f8e0..a45f93c6 100644
--- a/tests/dream/geant4_reduction_test.py
+++ b/tests/dream/geant4_reduction_test.py
@@ -8,9 +8,9 @@
 
 from ess.powder.types import (
     AccumulatedProtonCharge,
+    BackgroundRun,
     CalibrationFilename,
     DspacingBins,
-    EmptyCanRun,
     Filename,
     IofDspacing,
     IofDspacingTwoTheta,
@@ -47,7 +47,7 @@ def params(request):
         NeXusDetectorName: request.param,
         Filename[SampleRun]: dream.data.simulated_diamond_sample(),
         Filename[VanadiumRun]: dream.data.simulated_vanadium_sample(),
-        Filename[EmptyCanRun]: dream.data.simulated_empty_can(),
+        Filename[BackgroundRun]: dream.data.simulated_empty_can(),
         CalibrationFilename: None,
         UncertaintyBroadcastMode: UncertaintyBroadcastMode.drop,
         DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'),

From 6dc10b74d9f826166c28783df13765b7d8d37bc8 Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 10:10:46 +0200
Subject: [PATCH 3/8] Begin using types from ESSreduce

---
 src/ess/dream/io/geant4.py       |   6 +-
 src/ess/powder/filtering.py      |   4 +-
 src/ess/powder/nexus.py          |   6 +-
 src/ess/powder/types.py          | 107 +------------------------------
 src/ess/snspowder/powgen/data.py |   8 +--
 5 files changed, 15 insertions(+), 116 deletions(-)

diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py
index 889eb0f1..922637af 100644
--- a/src/ess/dream/io/geant4.py
+++ b/src/ess/dream/io/geant4.py
@@ -9,6 +9,7 @@
 from ess.powder.types import (
     CalibrationData,
     CalibrationFilename,
+    DetectorData,
     Filename,
     NeXusDetector,
     NeXusDetectorDimensions,
@@ -16,7 +17,6 @@
     RawDetector,
     RawSample,
     RawSource,
-    ReducibleDetectorData,
     RunType,
     SamplePosition,
     SampleRun,
@@ -179,8 +179,8 @@ def patch_detector_data(
     detector_data: RawDetector[RunType],
     source_position: SourcePosition[RunType],
     sample_position: SamplePosition[RunType],
-) -> ReducibleDetectorData[RunType]:
-    return ReducibleDetectorData[RunType](
+) -> DetectorData[RunType]:
+    return DetectorData[RunType](
         detector_data.assign_coords(
             source_position=source_position, sample_position=sample_position
         )
diff --git a/src/ess/powder/filtering.py b/src/ess/powder/filtering.py
index f49ab736..4c7b0719 100644
--- a/src/ess/powder/filtering.py
+++ b/src/ess/powder/filtering.py
@@ -12,7 +12,7 @@
 
 import scipp as sc
 
-from .types import FilteredData, ReducibleDetectorData, RunType
+from .types import DetectorData, FilteredData, RunType
 
 
 def _equivalent_bin_indices(a, b) -> bool:
@@ -72,7 +72,7 @@ def remove_bad_pulses(
     return filtered
 
 
-def filter_events(data: ReducibleDetectorData[RunType]) -> FilteredData[RunType]:
+def filter_events(data: DetectorData[RunType]) -> FilteredData[RunType]:
     """Remove bad events.
 
     Attention
diff --git a/src/ess/powder/nexus.py b/src/ess/powder/nexus.py
index 46728900..daca6970 100644
--- a/src/ess/powder/nexus.py
+++ b/src/ess/powder/nexus.py
@@ -11,6 +11,7 @@
 
 from ess.powder.types import (
     DetectorBankSizes,
+    DetectorData,
     DetectorEventData,
     Filename,
     MonitorEventData,
@@ -24,7 +25,6 @@
     RawMonitorData,
     RawSample,
     RawSource,
-    ReducibleDetectorData,
     RunType,
     SamplePosition,
     SourcePosition,
@@ -166,7 +166,7 @@ def assemble_detector_data(
     event_data: DetectorEventData[RunType],
     source_position: SourcePosition[RunType],
     sample_position: SamplePosition[RunType],
-) -> ReducibleDetectorData[RunType]:
+) -> DetectorData[RunType]:
     """
     Assemble a detector data array with event data and source- and sample-position.
 
@@ -175,7 +175,7 @@
     grouped = nexus.group_event_data(
         event_data=event_data, detector_number=detector.coords['detector_number']
     )
-    return ReducibleDetectorData[RunType](
+    return DetectorData[RunType](
         _add_variances(grouped)
         .assign_coords(source_position=source_position, sample_position=sample_position)
         .assign_coords(detector.coords)
diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py
index fa30aa71..810c7fc1 100644
--- a/src/ess/powder/types.py
+++ b/src/ess/powder/types.py
@@ -16,6 +16,8 @@
 from ess.reduce.nexus import types as reduce_t
 from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode
 
+# 1 TypeVars used to parametrize the generic parts of the workflow
+
 BackgroundRun = reduce_gt.BackgroundRun
 CalibratedDetector = reduce_gt.CalibratedDetector
 CalibratedMonitor = reduce_gt.CalibratedMonitor
@@ -37,57 +39,22 @@
 TransmissionRun = reduce_gt.TransmissionRun
 SamplePosition = reduce_gt.SamplePosition
 SourcePosition = reduce_gt.SourcePosition
+VanadiumRun = reduce_gt.VanadiumRun
 
 DetectorBankSizes = reduce_t.DetectorBankSizes
 NeXusDetectorName = reduce_t.NeXusDetectorName
 
-# 1 TypeVars used to parametrize the generic parts of the workflow
-
-# 1.1 Run types
-BackgroundRun = NewType("BackgroundRun", int)
-"""Empty sample can run."""
-EmptyBeamRun = NewType("EmptyBeamRun", int)
-"""Empty instrument run."""
-SampleRun = NewType("SampleRun", int)
-"""Sample run."""
-VanadiumRun = NewType("VanadiumRun", int)
-"""Vanadium run."""
-RunType = TypeVar("RunType", EmptyBeamRun, SampleRun, VanadiumRun)
-"""TypeVar used for specifying the run."""
-
-# 1.2 Monitor types
-Monitor1 = NewType('Monitor1', int)
-"""Placeholder for monitor 1."""
-Monitor2 = NewType('Monitor2', int)
-"""Placeholder for monitor 2."""
-MonitorType = TypeVar('MonitorType', Monitor1, Monitor2)
-"""TypeVar used for identifying a monitor"""
-
 # 2 Workflow parameters
 
-DetectorBankSizes = NewType("DetectorBankSizes", dict[str, dict[str, int | Any]])
-
 CalibrationFilename = NewType("CalibrationFilename", str | None)
 """Filename of the instrument calibration file."""
 
-NeXusDetectorName = NewType("NeXusDetectorName", str)
-"""Name of detector entry in NeXus file"""
-
-
-class NeXusMonitorName(sciline.Scope[MonitorType, str], str):
-    """Name of Incident|Transmission monitor in NeXus file"""
-
-
 DspacingBins = NewType("DSpacingBins", sc.Variable)
 """Bin edges for d-spacing."""
 
-
-class Filename(sciline.Scope[RunType, str], str):
-    """Name of an input file."""
-
-
 OutFilename = NewType("OutFilename", str)
 """Filename of the output."""
 
@@ -122,8 +89,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
 CalibrationData = NewType("CalibrationData", sc.Dataset | None)
 """Detector calibration data."""
 
-DataFolder = NewType("DataFolder", str)
-
 
 class DataWithScatteringCoordinates(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
     """Data with scattering coordinates computed for all events: wavelength, 2theta,
@@ -168,48 +133,6 @@ class FocussedDataDspacingTwoTheta(sciline.Scope[RunType, sc.DataArray], sc.Data
     """Data that has been normalized by a vanadium run, and grouped into 2theta bins."""
 
 
-class NeXusDetector(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
-    """
-    Detector loaded from a NeXus file, without event data.
-
-    Contains detector numbers, pixel shape information, transformations, ...
-    """
-
-
-class NeXusMonitor(
-    sciline.ScopeTwoParams[RunType, MonitorType, sc.DataGroup], sc.DataGroup
-):
-    """
-    Monitor loaded from a NeXus file, without event data.
-
-    Contains detector numbers, pixel shape information, transformations, ...
-    """
-
-
-class DetectorEventData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
-    """Event data loaded from a detector in a NeXus file"""
-
-
-class MonitorEventData(
-    sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
-    """Event data loaded from a monitor in a NeXus file"""
-
-
-class RawMonitor(
-    sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
-    """Raw monitor data"""
-
-
-class RawMonitorData(
-    sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray
-):
-    """Raw monitor data where variances and necessary coordinates
-    (e.g. source position) have been added, and where optionally some
-    user configuration was applied to some of the coordinates."""
-
-
 class MaskedData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
     """Data with masked pixels, tof regions, wavelength regions, 2theta regions,
     or dspacing regions."""
@@ -235,30 +158,6 @@ class RawDataAndMetadata(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
     """Raw data and associated metadata."""
 
 
-class RawDetector(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
-    """Data (events / histogram) extracted from a RawDetector."""
-
-
-class RawSample(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
-    """Raw data from a loaded sample."""
-
-
-class RawSource(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
-    """Raw data from a loaded neutron source."""
-
-
-class ReducibleDetectorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
-    """Data that is in a state ready for reduction."""
-
-
-class SamplePosition(sciline.Scope[RunType, sc.Variable], sc.Variable):
-    """Sample position"""
-
-
-class SourcePosition(sciline.Scope[RunType, sc.Variable], sc.Variable):
-    """Source position"""
-
-
 TofMask = NewType("TofMask", Callable | None)
 """TofMask is a callable that returns a mask for a given TofData."""
 
diff --git a/src/ess/snspowder/powgen/data.py b/src/ess/snspowder/powgen/data.py
index 0ce636a0..ac4c9e28 100644
--- a/src/ess/snspowder/powgen/data.py
+++ b/src/ess/snspowder/powgen/data.py
@@ -9,11 +9,11 @@
     AccumulatedProtonCharge,
     CalibrationData,
     CalibrationFilename,
+    DetectorData,
     Filename,
     NeXusDetectorDimensions,
     ProtonCharge,
     RawDataAndMetadata,
-    ReducibleDetectorData,
     RunType,
 )
 
@@ -120,14 +120,14 @@ def pooch_load_calibration(
 def extract_raw_data(
     dg: RawDataAndMetadata[RunType], sizes: NeXusDetectorDimensions
-) -> ReducibleDetectorData[RunType]:
+) -> DetectorData[RunType]:
     """Return the events from a loaded data group."""
     # Remove the tof binning and dimension, as it is not needed and it gets in the way
     # of masking.
     out = dg["data"].squeeze()
     out.coords.pop("tof", None)
     out = out.fold(dim="spectrum", sizes=sizes)
-    return ReducibleDetectorData[RunType](out)
+    return DetectorData[RunType](out)
 
 
 def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunType]:
@@ -136,7 +136,7 @@
 
 
 def extract_accumulated_proton_charge(
-    data: ReducibleDetectorData[RunType],
+    data: DetectorData[RunType],
 ) -> AccumulatedProtonCharge[RunType]:
     """Return the stored accumulated proton charge from a loaded data group."""
     return AccumulatedProtonCharge[RunType](data.coords["gd_prtn_chrg"])

From f464fcde0ce8a37501afff6b13305f3e314d3217 Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 11:05:23 +0200
Subject: [PATCH 4/8] Refactor geant4 tests

---
 src/ess/dream/io/geant4.py           |  91 ++++-----
 src/ess/dream/io/nexus.py            |   5 +-
 src/ess/dream/workflow.py            |  23 ++-
 src/ess/powder/__init__.py           |   1 -
 src/ess/powder/nexus.py              | 289 ---------------------------
 src/ess/powder/types.py              |   2 +
 tests/dream/geant4_reduction_test.py |  88 ++++----
 tests/dream/io/geant4_test.py        |  30 ++-
 8 files changed, 112 insertions(+), 417 deletions(-)
 delete mode 100644 src/ess/powder/nexus.py

diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py
index 922637af..d41f77f9 100644
--- a/src/ess/dream/io/geant4.py
+++ b/src/ess/dream/io/geant4.py
@@ -4,23 +4,19 @@
 import numpy as np
 import sciline
 import scipp as sc
-from ess.reduce.nexus import extract_detector_data
+from ess.reduce.nexus.generic_workflow import GenericNeXusWorkflow
 
 from ess.powder.types import (
+    CalibratedDetector,
     CalibrationData,
     CalibrationFilename,
     DetectorData,
     Filename,
     NeXusDetector,
-    NeXusDetectorDimensions,
     NeXusDetectorName,
-    RawDetector,
-    RawSample,
-    RawSource,
+    NeXusSample,
+    NeXusSource,
     RunType,
-    SamplePosition,
-    SampleRun,
-    SourcePosition,
 )
 
 MANTLE_DETECTOR_ID = sc.index(7)
@@ -69,13 +65,6 @@ def extract_geant4_detector(
     return NeXusDetector[RunType](detectors["instrument"][detector_name])
 
 
-def extract_geant4_detector_data(
-    detector: NeXusDetector[RunType],
-) -> RawDetector[RunType]:
-    """Extract the histogram or event data from a loaded GEANT4 detector."""
-    return RawDetector[RunType](extract_detector_data(detector))
-
-
 def _load_raw_events(file_path: str) -> sc.DataArray:
     table = sc.io.load_csv(
         file_path, sep="\t", header_parser="bracket", data_columns=[]
@@ -167,53 +156,43 @@ def _extract_detector(
     return events
 
 
-def get_source_position(raw_source: RawSource[RunType]) -> SourcePosition[RunType]:
-    return SourcePosition[RunType](raw_source["position"])
-
-
-def get_sample_position(raw_sample: RawSample[RunType]) -> SamplePosition[RunType]:
-    return SamplePosition[RunType](raw_sample["position"])
+def geant4_load_calibration(filename: CalibrationFilename) -> CalibrationData:
+    if filename is not None:
+        # Needed to build a complete pipeline.
+        raise NotImplementedError(
+            "Calibration data loading is not implemented for DREAM GEANT4 data."
+        )
+    return CalibrationFilename(None)
 
 
-def patch_detector_data(
-    detector_data: RawDetector[RunType],
-    source_position: SourcePosition[RunType],
-    sample_position: SamplePosition[RunType],
+def dummy_assemble_detector_data(
+    detector: CalibratedDetector[RunType],
 ) -> DetectorData[RunType]:
-    return DetectorData[RunType](
-        detector_data.assign_coords(
-            source_position=source_position, sample_position=sample_position
-        )
-    )
+    """Dummy assembly of detector data, detector already contains neutron data."""
+    return DetectorData[RunType](detector)
 
 
-def geant4_detector_dimensions(
-    data: RawDetector[SampleRun],
-) -> NeXusDetectorDimensions:
-    # For geant4 data, we group by detector identifier, so the data already has
-    # logical dimensions, so we simply return the dimensions of the detector.
-    return NeXusDetectorDimensions(data.sizes)
+def dummy_source() -> NeXusSource[RunType]:
+    return NeXusSource[RunType](
+        sc.DataGroup(position=sc.vector([np.nan, np.nan, np.nan], unit="mm"))
+    )
 
 
-def geant4_load_calibration(
-    filename: CalibrationFilename,
-) -> CalibrationData:
-    if filename is not None:
-        # Needed to build a complete pipeline.
-        raise NotImplementedError(
-            "Calibration data loading is not implemented for DREAM GEANT4 data."
-        )
-    return CalibrationFilename(None)
+def dummy_sample() -> NeXusSample[RunType]:
+    return NeXusSample[RunType](
+        sc.DataGroup(position=sc.vector([np.nan, np.nan, np.nan], unit="mm"))
+    )
 
 
-providers = (
-    extract_geant4_detector,
-    extract_geant4_detector_data,
-    load_geant4_csv,
-    get_sample_position,
-    get_source_position,
-    patch_detector_data,
-    geant4_detector_dimensions,
-    geant4_load_calibration,
-)
-"""Geant4-providers for Sciline pipelines."""
+def LoadGeant4Workflow() -> sciline.Pipeline:
+    """
+    Workflow for loading GEANT4 CSV data.
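+
+    A usage sketch, mirroring tests/dream/io/geant4_test.py (the filename is
+    only a placeholder):
+
+        wf = LoadGeant4Workflow()
+        wf[Filename[SampleRun]] = "dream_geant4.csv"
+        wf[NeXusDetectorName] = NeXusDetectorName("mantle")
+        detector = wf.compute(NeXusDetector[SampleRun])["events"]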
+    """
+    wf = GenericNeXusWorkflow()
+    wf.insert(extract_geant4_detector)
+    wf.insert(load_geant4_csv)
+    wf.insert(geant4_load_calibration)
+    wf.insert(dummy_assemble_detector_data)
+    wf.insert(dummy_source)
+    wf.insert(dummy_sample)
+    return wf
diff --git a/src/ess/dream/io/nexus.py b/src/ess/dream/io/nexus.py
index b0bb884c..60e58644 100644
--- a/src/ess/dream/io/nexus.py
+++ b/src/ess/dream/io/nexus.py
@@ -40,10 +40,7 @@
         "counter": 2,
     },
     "high_resolution_detector": {"strip": 32, "other": -1},
-    "sans_detector": lambda x: x.fold(
-        dim="detector_number",
-        sizes={"strip": 32, "other": -1},
-    ),
+    "sans_detector": {"strip": 32, "other": -1},
 }
 
 
diff --git a/src/ess/dream/workflow.py b/src/ess/dream/workflow.py
index 0b7b8291..fc238134 100644
--- a/src/ess/dream/workflow.py
+++ b/src/ess/dream/workflow.py
@@ -7,13 +7,13 @@
 from ess.powder import providers as powder_providers
 from ess.powder.types import (
     AccumulatedProtonCharge,
-    RawSample,
-    RawSource,
+    NeXusSample,
+    NeXusSource,
     SampleRun,
     VanadiumRun,
 )
 
-from .io.geant4 import providers as geant4_providers
+from .io.geant4 import LoadGeant4Workflow
 
 
 def default_parameters() -> dict:
@@ -22,10 +22,10 @@ def default_parameters() -> dict:
     source = sc.DataGroup(position=sc.vector([-3.478, 0.0, -76550], unit="mm"))
     charge = sc.scalar(1.0, unit="µAh")
     return {
-        RawSample[SampleRun]: sample,
-        RawSample[VanadiumRun]: sample,
-        RawSource[SampleRun]: source,
-        RawSource[VanadiumRun]: source,
+        NeXusSample[SampleRun]: sample,
+        NeXusSample[VanadiumRun]: sample,
+        NeXusSource[SampleRun]: source,
+        NeXusSource[VanadiumRun]: source,
         AccumulatedProtonCharge[SampleRun]: charge,
         AccumulatedProtonCharge[VanadiumRun]: charge,
     }
@@ -35,9 +35,12 @@
 def DreamGeant4Workflow() -> sciline.Pipeline:
     """
     Workflow with default parameters for the Dream Geant4 simulation.
     """
-    return sciline.Pipeline(
-        providers=powder_providers + geant4_providers, params=default_parameters()
-    )
+    wf = LoadGeant4Workflow()
+    for provider in powder_providers:
+        wf.insert(provider)
+    for key, value in default_parameters().items():
+        wf[key] = value
+    return wf
 
 
 __all__ = ['DreamGeant4Workflow', 'default_parameters']
diff --git a/src/ess/powder/__init__.py b/src/ess/powder/__init__.py
index 60760038..3ce316e2 100644
--- a/src/ess/powder/__init__.py
+++ b/src/ess/powder/__init__.py
@@ -16,7 +16,6 @@
     transform,
 )
 from .masking import with_pixel_mask_filenames
-from . import nexus
 
 try:
     __version__ = importlib.metadata.version(__package__ or __name__)
diff --git a/src/ess/powder/nexus.py b/src/ess/powder/nexus.py
deleted file mode 100644
index daca6970..00000000
--- a/src/ess/powder/nexus.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# SPDX-License-Identifier: BSD-3-Clause
-# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
-
-"""NeXus input/output for ESS powder reduction."""
-
-from typing import Any
-
-import scipp as sc
-import scippnexus as snx
-from ess.reduce import nexus
-
-from ess.powder.types import (
-    DetectorBankSizes,
-    DetectorData,
-    DetectorEventData,
-    Filename,
-    MonitorEventData,
-    MonitorType,
-    NeXusDetector,
-    NeXusDetectorName,
-    NeXusMonitor,
-    NeXusMonitorName,
-    RawDetector,
-    RawMonitor,
-    RawMonitorData,
-    RawSample,
-    RawSource,
-    RunType,
-    SamplePosition,
-    SourcePosition,
-)
-
-
-def load_nexus_sample(file_path: Filename[RunType]) -> RawSample[RunType]:
-    return RawSample[RunType](nexus.load_sample(file_path))
-
-
-def dummy_load_sample(file_path: Filename[RunType]) -> RawSample[RunType]:
-    """
-    In test files there is not always a sample, so we need a dummy.
-    """
-    return RawSample[RunType](
-        sc.DataGroup({'position': sc.vector(value=[0, 0, 0], unit='m')})
-    )
-
-
-def load_nexus_source(file_path: Filename[RunType]) -> RawSource[RunType]:
-    return RawSource[RunType](nexus.load_source(file_path))
-
-
-def load_nexus_detector(
-    file_path: Filename[RunType], detector_name: NeXusDetectorName
-) -> NeXusDetector[RunType]:
-    """
-    Load detector from NeXus, but with event data replaced by placeholders.
-
-    Currently the placeholder is the detector number, but this may change in the future.
-
-    The returned object is a scipp.DataGroup, as it may contain additional information
-    about the detector that cannot be represented as a single scipp.DataArray. Most
-    downstream code will only be interested in the contained scipp.DataArray so this
-    needs to be extracted. However, other processing steps may require the additional
-    information, so it is kept in the DataGroup.
-
-    Loading thus proceeds in three steps:
-
-    1. This function loads the detector, but replaces the event data with placeholders.
-    2. :py:func:`get_detector_array` drops the additional information, returning only
-       the contained scipp.DataArray, reshaped to the logical detector shape.
-       This will generally contain coordinates as well as pixel masks.
-    3. :py:func:`assemble_detector_data` replaces placeholder data values with the
-       event data, and adds source and sample positions.
-    """
-    definitions = snx.base_definitions()
-    definitions["NXdetector"] = _StrippedDetector
-    dg = nexus.load_detector(
-        file_path=file_path,
-        detector_name=detector_name,
-        definitions=definitions,
-    )
-    # The name is required later, e.g., for determining logical detector shape
-    dg['detector_name'] = detector_name
-    return NeXusDetector[RunType](dg)
-
-
-def load_nexus_monitor(
-    file_path: Filename[RunType], monitor_name: NeXusMonitorName[MonitorType]
-) -> NeXusMonitor[RunType, MonitorType]:
-    """
-    Load monitor from NeXus, but with event data replaced by placeholders.
-
-    Currently the placeholder is a size-0 array, but this may change in the future.
-
-    The returned object is a scipp.DataGroup, as it may contain additional information
-    about the monitor that cannot be represented as a single scipp.DataArray. Most
-    downstream code will only be interested in the contained scipp.DataArray so this
-    needs to be extracted. However, other processing steps may require the additional
-    information, so it is kept in the DataGroup.
-
-    Loading thus proceeds in three steps:
-
-    1. This function loads the monitor, but replaces the event data with placeholders.
-    2. :py:func:`get_monitor_array` drops the additional information, returning only
-       the contained scipp.DataArray.
-       This will generally contain coordinates as well as pixel masks.
-    3. :py:func:`assemble_monitor_data` replaces placeholder data values with the
-       event data, and adds source and sample positions.
-    """
-    definitions = snx.base_definitions()
-    definitions["NXmonitor"] = _StrippedMonitor
-    monitor = nexus.load_monitor(
-        file_path=file_path, monitor_name=monitor_name, definitions=definitions
-    )
-    return NeXusMonitor[RunType, MonitorType](monitor)
-
-
-def get_source_position(
-    raw_source: RawSource[RunType],
-) -> SourcePosition[RunType]:
-    return SourcePosition[RunType](raw_source["position"])
-
-
-def get_sample_position(
-    raw_sample: RawSample[RunType],
-) -> SamplePosition[RunType]:
-    return SamplePosition[RunType](raw_sample["position"])
-
-
-def get_detector_signal_array(
-    detector: NeXusDetector[RunType],
-    bank_sizes: DetectorBankSizes | None = None,
-) -> RawDetector[RunType]:
-    """
-    Extract the data array corresponding to a detector's signal field.
-
-    The returned data array includes coords and masks pertaining directly to the
-    signal values array, but not additional information about the detector. The
-    data array is reshaped to the logical detector shape, which by folding the data
-    array along the detector_number dimension.
-    """
-    da = nexus.extract_detector_data(detector)
-    if (sizes := (bank_sizes or {}).get(detector['detector_name'])) is not None:
-        da = da.fold(dim="detector_number", sizes=sizes)
-    return RawDetector[RunType](da)
-
-
-def get_monitor_signal_array(
-    monitor: NeXusMonitor[RunType, MonitorType],
-    source_position: SourcePosition[RunType],
-) -> RawMonitor[RunType, MonitorType]:
-    """
-    Extract the data array corresponding to a monitor's signal field.
-
-    The returned data array includes coords pertaining directly to the
-    signal values array, but not additional information about the monitor.
-    """
-    return RawMonitor[RunType, MonitorType](
-        nexus.extract_monitor_data(monitor).assign_coords(
-            position=monitor['position'], source_position=source_position
-        )
-    )
-
-
-def assemble_detector_data(
-    detector: RawDetector[RunType],
-    event_data: DetectorEventData[RunType],
-    source_position: SourcePosition[RunType],
-    sample_position: SamplePosition[RunType],
-) -> DetectorData[RunType]:
-    """
-    Assemble a detector data array with event data and source- and sample-position.
-
-    Also adds variances to the event data if they are missing.
-    """
-    grouped = nexus.group_event_data(
-        event_data=event_data, detector_number=detector.coords['detector_number']
-    )
-    return DetectorData[RunType](
-        _add_variances(grouped)
-        .assign_coords(source_position=source_position, sample_position=sample_position)
-        .assign_coords(detector.coords)
-        .assign_masks(detector.masks)
-    )
-
-
-def assemble_monitor_data(
-    monitor: RawMonitor[RunType, MonitorType],
-    event_data: MonitorEventData[RunType, MonitorType],
-) -> RawMonitorData[RunType, MonitorType]:
-    """
-    Assemble a monitor data array with event data.
-
-    Also adds variances to the event data if they are missing.
-    """
-    da = event_data.assign_coords(monitor.coords).assign_masks(monitor.masks)
-    return RawMonitorData[RunType, MonitorType](_add_variances(da=da))
-
-
-def _drop(
-    children: dict[str, snx.Field | snx.Group], classes: tuple[snx.NXobject, ...]
-) -> dict[str, snx.Field | snx.Group]:
-    return {
-        name: child
-        for name, child in children.items()
-        if not (isinstance(child, snx.Group) and (child.nx_class in classes))
-    }
-
-
-class _StrippedDetector(snx.NXdetector):
-    """Detector definition without large geometry or event data for ScippNexus.
-
-    Drops NXoff_geometry and NXevent_data groups, data is replaced by detector_number.
-    """
-
-    def __init__(
-        self, attrs: dict[str, Any], children: dict[str, snx.Field | snx.Group]
-    ):
-        children = _drop(children, (snx.NXoff_geometry, snx.NXevent_data))
-        children['data'] = children['detector_number']
-        super().__init__(attrs=attrs, children=children)
-
-
-class _DummyField:
-    """Dummy field that can replace snx.Field in NXmonitor."""
-
-    def __init__(self):
-        self.attrs = {}
-        self.sizes = {'event_time_zero': 0}
-        self.dims = ('event_time_zero',)
-        self.shape = (0,)
-
-    def __getitem__(self, key: Any) -> sc.Variable:
-        return sc.empty(dims=self.dims, shape=self.shape, unit=None)
-
-
-class _StrippedMonitor(snx.NXmonitor):
-    """Monitor definition without event data for ScippNexus.
-
-    Drops NXevent_data group, data is replaced by a dummy field.
-    """
-
-    def __init__(
-        self, attrs: dict[str, Any], children: dict[str, snx.Field | snx.Group]
-    ):
-        children = _drop(children, (snx.NXevent_data,))
-        children['data'] = _DummyField()
-        super().__init__(attrs=attrs, children=children)
-
-
-def load_detector_event_data(
-    file_path: Filename[RunType], detector_name: NeXusDetectorName
-) -> DetectorEventData[RunType]:
-    da = nexus.load_event_data(file_path=file_path, component_name=detector_name)
-    return DetectorEventData[RunType](da)
-
-
-def load_monitor_event_data(
-    file_path: Filename[RunType], monitor_name: NeXusMonitorName[MonitorType]
-) -> MonitorEventData[RunType, MonitorType]:
-    da = nexus.load_event_data(file_path=file_path, component_name=monitor_name)
-    return MonitorEventData[RunType, MonitorType](da)
-
-
-def _add_variances(da: sc.DataArray) -> sc.DataArray:
-    out = da.copy(deep=False)
-    if out.bins is not None:
-        content = out.bins.constituents['data']
-        if content.variances is None:
-            content.variances = content.values
-    return out
-
-
-providers = (
-    assemble_detector_data,
-    assemble_monitor_data,
-    get_detector_signal_array,
-    get_monitor_signal_array,
-    get_sample_position,
-    get_source_position,
-    load_detector_event_data,
-    load_monitor_event_data,
-    load_nexus_detector,
-    load_nexus_monitor,
-    load_nexus_sample,
-    load_nexus_source,
-)
-"""
-Providers for loading and processing NeXus data.
-""" diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py index 810c7fc1..bc7a8ec2 100644 --- a/src/ess/powder/types.py +++ b/src/ess/powder/types.py @@ -32,6 +32,8 @@ NeXusMonitorName = reduce_gt.NeXusMonitorName NeXusDetector = reduce_gt.NeXusDetector NeXusMonitor = reduce_gt.NeXusMonitor +NeXusSample = reduce_gt.NeXusSample +NeXusSource = reduce_gt.NeXusSource RunType = reduce_gt.RunType SampleRun = reduce_gt.SampleRun ScatteringRunType = reduce_gt.ScatteringRunType diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py index a45f93c6..8bb869c5 100644 --- a/tests/dream/geant4_reduction_test.py +++ b/tests/dream/geant4_reduction_test.py @@ -6,6 +6,7 @@ import scipp as sc from ess import dream, powder +import ess.dream.data # noqa: F401 from ess.powder.types import ( AccumulatedProtonCharge, BackgroundRun, @@ -16,9 +17,9 @@ IofDspacingTwoTheta, MaskedData, NeXusDetectorName, + NeXusSample, + NeXusSource, NormalizedByProtonCharge, - RawSample, - RawSource, SampleRun, TofMask, TwoThetaBins, @@ -53,10 +54,10 @@ def params(request): DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'), TofMask: lambda x: (x < sc.scalar(0.0, unit='ns')) | (x > sc.scalar(86e6, unit='ns')), - RawSample[SampleRun]: sample, - RawSample[VanadiumRun]: sample, - RawSource[SampleRun]: source, - RawSource[VanadiumRun]: source, + NeXusSample[SampleRun]: sample, + NeXusSample[VanadiumRun]: sample, + NeXusSource[SampleRun]: source, + NeXusSource[VanadiumRun]: source, AccumulatedProtonCharge[SampleRun]: charge, AccumulatedProtonCharge[VanadiumRun]: charge, TwoThetaMask: None, @@ -64,60 +65,59 @@ def params(request): } -def test_can_create_pipeline(providers, params): - sciline.Pipeline(providers, params=params) +@pytest.fixture() +def workflow(params): + wf = dream.DreamGeant4Workflow() + for key, value in params.items(): + wf[key] = value + return wf -def test_pipeline_can_compute_dspacing_result(providers, params): - pipeline = sciline.Pipeline(providers, params=params) - pipeline = powder.with_pixel_mask_filenames(pipeline, []) - result = pipeline.compute(IofDspacing) +def test_pipeline_can_compute_dspacing_result(workflow): + workflow = powder.with_pixel_mask_filenames(workflow, []) + result = workflow.compute(IofDspacing) assert result.sizes == { - 'dspacing': len(params[DspacingBins]) - 1, + 'dspacing': len(workflow.compute(DspacingBins)) - 1, } - assert sc.identical(result.coords['dspacing'], params[DspacingBins]) + assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins)) -def test_workflow_is_deterministic(providers, params): - pipeline = sciline.Pipeline(providers, params=params) - pipeline = powder.with_pixel_mask_filenames(pipeline, []) +def test_workflow_is_deterministic(workflow): + workflow = powder.with_pixel_mask_filenames(workflow, []) # This is Sciline's default scheduler, but we want to be explicit here scheduler = sciline.scheduler.DaskScheduler() - graph = pipeline.get(IofDspacing, scheduler=scheduler) + graph = workflow.get(IofDspacing, scheduler=scheduler) reference = graph.compute().data result = graph.compute().data assert sc.identical(sc.values(result), sc.values(reference)) -def test_pipeline_can_compute_intermediate_results(providers, params): - pipeline = sciline.Pipeline(providers, params=params) - pipeline = powder.with_pixel_mask_filenames(pipeline, []) - result = pipeline.compute(NormalizedByProtonCharge[SampleRun]) +def test_pipeline_can_compute_intermediate_results(workflow): + workflow = 
+    result = workflow.compute(NormalizedByProtonCharge[SampleRun])
     assert set(result.dims) == {'segment', 'wire', 'counter', 'strip', 'module'}
 
 
-def test_pipeline_group_by_two_theta(providers, params):
-    params[TwoThetaBins] = sc.linspace(
+def test_pipeline_group_by_two_theta(workflow):
+    workflow[TwoThetaBins] = sc.linspace(
         dim='two_theta', unit='rad', start=0.8, stop=2.4, num=17
     )
-    pipeline = sciline.Pipeline(providers, params=params)
-    pipeline = powder.with_pixel_mask_filenames(pipeline, [])
-    result = pipeline.compute(IofDspacingTwoTheta)
+    workflow = powder.with_pixel_mask_filenames(workflow, [])
+    result = workflow.compute(IofDspacingTwoTheta)
     assert result.sizes == {
         'two_theta': 16,
-        'dspacing': len(params[DspacingBins]) - 1,
+        'dspacing': len(workflow.compute(DspacingBins)) - 1,
     }
-    assert sc.identical(result.coords['dspacing'], params[DspacingBins])
-    assert sc.allclose(result.coords['two_theta'], params[TwoThetaBins])
+    assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins))
+    assert sc.allclose(result.coords['two_theta'], workflow.compute(TwoThetaBins))
 
 
-def test_pipeline_wavelength_masking(providers, params):
+def test_pipeline_wavelength_masking(workflow):
     wmin = sc.scalar(0.18, unit="angstrom")
     wmax = sc.scalar(0.21, unit="angstrom")
-    params[WavelengthMask] = lambda x: (x > wmin) & (x < wmax)
-    pipeline = sciline.Pipeline(providers, params=params)
-    pipeline = powder.with_pixel_mask_filenames(pipeline, [])
-    masked_sample = pipeline.compute(MaskedData[SampleRun])
+    workflow[WavelengthMask] = lambda x: (x > wmin) & (x < wmax)
+    workflow = powder.with_pixel_mask_filenames(workflow, [])
+    masked_sample = workflow.compute(MaskedData[SampleRun])
     assert 'wavelength' in masked_sample.bins.masks
     sum_in_masked_region = (
         masked_sample.bin(wavelength=sc.concat([wmin, wmax], dim='wavelength'))
@@ -130,13 +130,12 @@
     )
 
 
-def test_pipeline_two_theta_masking(providers, params):
+def test_pipeline_two_theta_masking(workflow):
     tmin = sc.scalar(1.0, unit="rad")
     tmax = sc.scalar(1.2, unit="rad")
-    params[TwoThetaMask] = lambda x: (x > tmin) & (x < tmax)
-    pipeline = sciline.Pipeline(providers, params=params)
-    pipeline = powder.with_pixel_mask_filenames(pipeline, [])
-    masked_sample = pipeline.compute(MaskedData[SampleRun])
+    workflow[TwoThetaMask] = lambda x: (x > tmin) & (x < tmax)
+    workflow = powder.with_pixel_mask_filenames(workflow, [])
+    masked_sample = workflow.compute(MaskedData[SampleRun])
     assert 'two_theta' in masked_sample.masks
     sum_in_masked_region = (
         masked_sample.bin(two_theta=sc.concat([tmin, tmax], dim='two_theta')).sum().data
@@ -147,13 +146,10 @@
     )
 
 
-def test_use_workflow_helper(params):
-    workflow = dream.DreamGeant4Workflow()
-    for key, value in params.items():
-        workflow[key] = value
+def test_use_workflow_helper(workflow):
     workflow = powder.with_pixel_mask_filenames(workflow, [])
     result = workflow.compute(IofDspacing)
     assert result.sizes == {
-        'dspacing': len(params[DspacingBins]) - 1,
+        'dspacing': len(workflow.compute(DspacingBins)) - 1,
     }
-    assert sc.identical(result.coords['dspacing'], params[DspacingBins])
+    assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins))
diff --git a/tests/dream/io/geant4_test.py b/tests/dream/io/geant4_test.py
index 70adecb0..4ca29f04 100644
--- a/tests/dream/io/geant4_test.py
+++ b/tests/dream/io/geant4_test.py
@@ -6,12 +6,19 @@
 import numpy as np
 import pytest
-import sciline
 import scipp as sc
 import scipp.testing
 
 from ess.dream import data, load_geant4_csv
-from ess.powder.types import Filename, NeXusDetectorName, RawDetector, SampleRun
+from ess.dream.io.geant4 import LoadGeant4Workflow
+from ess.powder.types import (
+    Filename,
+    NeXusDetector,
+    NeXusDetectorName,
+    NeXusSample,
+    NeXusSource,
+    SampleRun,
+)
 
 
 @pytest.fixture(scope="module")
@@ -171,15 +178,16 @@ def test_load_geant4_csv_sans_has_expected_coords(file):
 
 
 def test_geant4_in_pipeline(file_path, file):
-    from ess.dream.io.geant4 import providers
-
-    pipeline = sciline.Pipeline(
-        providers,
-        params={
-            Filename[SampleRun]: file_path,
-            NeXusDetectorName: NeXusDetectorName("mantle"),
-        },
+    pipeline = LoadGeant4Workflow()
+    pipeline[Filename[SampleRun]] = file_path
+    pipeline[NeXusDetectorName] = NeXusDetectorName("mantle")
+    pipeline[NeXusSample[SampleRun]] = sc.DataGroup(
+        position=sc.vector([0.0, 0.0, 0.0], unit="mm")
+    )
+    pipeline[NeXusSource[SampleRun]] = sc.DataGroup(
+        position=sc.vector([-3.478, 0.0, -76550], unit="mm")
     )
-    detector = pipeline.compute(RawDetector[SampleRun])
+
+    detector = pipeline.compute(NeXusDetector[SampleRun])['events']
     expected = load_geant4_csv(file)["instrument"]["mantle"]["events"]
     sc.testing.assert_identical(detector, expected)

From 4da8d9565781ae0b48bc18da4561942648e03810 Mon Sep 17 00:00:00 2001
From: Simon Heybrock <12912489+SimonHeybrock@users.noreply.github.com>
Date: Mon, 26 Aug 2024 13:09:36 +0200
Subject: [PATCH 5/8] Apply suggestions from code review

Co-authored-by: Jan-Lukas Wynen
---
 src/ess/dream/io/geant4.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py
index d41f77f9..716c6257 100644
--- a/src/ess/dream/io/geant4.py
+++ b/src/ess/dream/io/geant4.py
@@ -162,7 +162,7 @@ def geant4_load_calibration(filename: CalibrationFilename) -> CalibrationData:
         raise NotImplementedError(
             "Calibration data loading is not implemented for DREAM GEANT4 data."
         )
-    return CalibrationFilename(None)
+    return CalibrationData(None)

From 8cf2ccd68fc1bd20bbd031cf234d0b1a6e507647 Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 13:14:16 +0200
Subject: [PATCH 6/8] Use dict in tests

---
 tests/dream/geant4_reduction_test.py | 75 ++++++++++++++--------------
 1 file changed, 37 insertions(+), 38 deletions(-)

diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py
index 8bb869c5..b1d2ac23 100644
--- a/tests/dream/geant4_reduction_test.py
+++ b/tests/dream/geant4_reduction_test.py
@@ -37,38 +37,40 @@ def providers():
     return [*powder.providers, *geant4_providers]
 
 
+sample = sc.DataGroup(position=sc.vector([0.0, 0.0, 0.0], unit='mm'))
+source = sc.DataGroup(position=sc.vector([-3.478, 0.0, -76550], unit='mm'))
+charge = sc.scalar(1.0, unit='µAh')
+
+params = {
+    Filename[SampleRun]: dream.data.simulated_diamond_sample(),
+    Filename[VanadiumRun]: dream.data.simulated_vanadium_sample(),
+    Filename[BackgroundRun]: dream.data.simulated_empty_can(),
+    CalibrationFilename: None,
+    UncertaintyBroadcastMode: UncertaintyBroadcastMode.drop,
+    DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'),
+    TofMask: lambda x: (x < sc.scalar(0.0, unit='ns'))
+    | (x > sc.scalar(86e6, unit='ns')),
+    NeXusSample[SampleRun]: sample,
+    NeXusSample[VanadiumRun]: sample,
+    NeXusSource[SampleRun]: source,
+    NeXusSource[VanadiumRun]: source,
+    AccumulatedProtonCharge[SampleRun]: charge,
+    AccumulatedProtonCharge[VanadiumRun]: charge,
+    TwoThetaMask: None,
+    WavelengthMask: None,
+}
+
+
 @pytest.fixture(params=["mantle", "endcap_backward", "endcap_forward"])
-def params(request):
+def params_for_det(request):
     # Not available in simulated data
-    sample = sc.DataGroup(position=sc.vector([0.0, 0.0, 0.0], unit='mm'))
-    source = sc.DataGroup(position=sc.vector([-3.478, 0.0, -76550], unit='mm'))
-    charge = sc.scalar(1.0, unit='µAh')
-
-    return {
-        NeXusDetectorName: request.param,
-        Filename[SampleRun]: dream.data.simulated_diamond_sample(),
-        Filename[VanadiumRun]: dream.data.simulated_vanadium_sample(),
-        Filename[BackgroundRun]: dream.data.simulated_empty_can(),
-        CalibrationFilename: None,
-        UncertaintyBroadcastMode: UncertaintyBroadcastMode.drop,
-        DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'),
-        TofMask: lambda x: (x < sc.scalar(0.0, unit='ns'))
-        | (x > sc.scalar(86e6, unit='ns')),
-        NeXusSample[SampleRun]: sample,
-        NeXusSample[VanadiumRun]: sample,
-        NeXusSource[SampleRun]: source,
-        NeXusSource[VanadiumRun]: source,
-        AccumulatedProtonCharge[SampleRun]: charge,
-        AccumulatedProtonCharge[VanadiumRun]: charge,
-        TwoThetaMask: None,
-        WavelengthMask: None,
-    }
+    return {**params, NeXusDetectorName: request.param}
 
 
 @pytest.fixture()
-def workflow(params):
+def workflow(params_for_det):
     wf = dream.DreamGeant4Workflow()
-    for key, value in params.items():
+    for key, value in params_for_det.items():
         wf[key] = value
     return wf
@@ -76,10 +78,8 @@
 def test_pipeline_can_compute_dspacing_result(workflow):
     workflow = powder.with_pixel_mask_filenames(workflow, [])
     result = workflow.compute(IofDspacing)
-    assert result.sizes == {
-        'dspacing': len(workflow.compute(DspacingBins)) - 1,
-    }
-    assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins))
+    assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1}
+    assert sc.identical(result.coords['dspacing'], params[DspacingBins])
 
 
 def test_workflow_is_deterministic(workflow):
@@ -99,17 +99,18 @@
 def test_pipeline_group_by_two_theta(workflow):
-    workflow[TwoThetaBins] = sc.linspace(
+    two_theta_bins = sc.linspace(
         dim='two_theta', unit='rad', start=0.8, stop=2.4, num=17
     )
+    workflow[TwoThetaBins] = two_theta_bins
     workflow = powder.with_pixel_mask_filenames(workflow, [])
     result = workflow.compute(IofDspacingTwoTheta)
     assert result.sizes == {
         'two_theta': 16,
-        'dspacing': len(workflow.compute(DspacingBins)) - 1,
+        'dspacing': len(params[DspacingBins]) - 1,
     }
-    assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins))
-    assert sc.allclose(result.coords['two_theta'], workflow.compute(TwoThetaBins))
+    assert sc.identical(result.coords['dspacing'], params[DspacingBins])
+    assert sc.allclose(result.coords['two_theta'], two_theta_bins)
 
 
 def test_pipeline_wavelength_masking(workflow):
@@ -149,7 +150,5 @@ def test_pipeline_two_theta_masking(workflow):
 
 
 def test_use_workflow_helper(workflow):
     workflow = powder.with_pixel_mask_filenames(workflow, [])
     result = workflow.compute(IofDspacing)
-    assert result.sizes == {
-        'dspacing': len(workflow.compute(DspacingBins)) - 1,
-    }
-    assert sc.identical(result.coords['dspacing'], workflow.compute(DspacingBins))
+    assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1}
+    assert sc.identical(result.coords['dspacing'], params[DspacingBins])

From 8ae0d543cab1ba413e91626f5f547065eda48c1e Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 13:17:03 +0200
Subject: [PATCH 7/8] Bump essreduce

---
 pyproject.toml            |  2 +-
 requirements/base.in      |  2 +-
 requirements/base.txt     | 32 +++++++++++++++++++-----------
 requirements/basetest.txt |  4 ++--
 requirements/ci.txt       |  6 +++---
 requirements/dev.txt      |  4 ++--
 requirements/docs.txt     | 10 +++++-----
 requirements/mypy.txt     |  2 +-
 requirements/nightly.txt  | 22 ++++++++++++----------
 9 files changed, 48 insertions(+), 36 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 1acf1608..96989acd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,7 +31,7 @@ requires-python = ">=3.10"
 # Make sure to list one dependency per line.
 dependencies = [
     "dask",
-    "essreduce>=24.07.1",
+    "essreduce>=24.08.2",
     "graphviz",
     "numpy",
     "plopp",
diff --git a/requirements/base.in b/requirements/base.in
index 48d4d054..1f552205 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -3,7 +3,7 @@
 # --- END OF CUSTOM SECTION ---
 # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY!
 dask
-essreduce>=24.07.1
+essreduce>=24.08.2
 graphviz
 numpy
 plopp
diff --git a/requirements/base.txt b/requirements/base.txt
index 4e0d80ef..95e3a21b 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,4 +1,4 @@
-# SHA1:644597105dd89e09109382941ce5eb8d7de1db9d
+# SHA1:2ff97b6a96b750574b258e7cfc322df8621ef521
 #
 # This file is autogenerated by pip-compile-multi
 # To update, run:
@@ -19,12 +19,14 @@ cyclebane==24.6.0
     # via sciline
 cycler==0.12.1
     # via matplotlib
-dask==2024.8.0
+dask==2024.8.1
     # via -r base.in
 decorator==5.1.1
     # via ipython
-essreduce==24.8.0
+essreduce==24.8.2
     # via -r base.in
+exceptiongroup==1.2.2
+    # via ipython
 executing==2.0.1
     # via stack-data
 fonttools==4.53.1
@@ -37,23 +39,25 @@ h5py==3.11.0
     # via
     #   scippneutron
     #   scippnexus
+importlib-metadata==8.4.0
+    # via dask
 ipydatawidgets==4.3.5
     # via pythreejs
 ipython==8.26.0
     # via ipywidgets
-ipywidgets==8.1.3
+ipywidgets==8.1.5
     # via
     #   ipydatawidgets
     #   pythreejs
 jedi==0.19.1
     # via ipython
-jupyterlab-widgets==3.0.11
+jupyterlab-widgets==3.0.13
     # via ipywidgets
 kiwisolver==1.4.5
     # via matplotlib
 locket==1.0.0
     # via partd
-matplotlib==3.9.1.post1
+matplotlib==3.9.2
     # via
     #   mpltoolbox
     #   plopp
@@ -63,7 +67,7 @@ mpltoolbox==24.5.1
     # via scippneutron
 networkx==3.3
     # via cyclebane
-numpy==2.0.1
+numpy==2.1.0
     # via
     #   -r base.in
     #   contourpy
@@ -99,7 +103,7 @@ pure-eval==0.2.3
     # via stack-data
 pygments==2.18.0
     # via ipython
-pyparsing==3.1.2
+pyparsing==3.1.4
     # via matplotlib
 python-dateutil==2.9.0.post0
     # via
@@ -110,7 +114,9 @@ pythreejs==2.4.2
 pyyaml==6.0.2
     # via dask
 sciline==24.6.2
-    # via -r base.in
+    # via
+    #   -r base.in
+    #   essreduce
 scipp==24.8.0
     # via
     #   -r base.in
@@ -124,7 +130,7 @@ scippnexus==24.8.1
     # via
     #   -r base.in
     #   essreduce
     #   scippneutron
-scipy==1.14.0
+scipy==1.14.1
     # via
     #   scippneutron
     #   scippnexus
@@ -148,7 +154,11 @@ traitlets==5.14.3
     # via
     #   traittypes
 traittypes==0.2.1
     # via ipydatawidgets
+typing-extensions==4.12.2
+    # via ipython
 wcwidth==0.2.13
     # via prompt-toolkit
-widgetsnbextension==4.0.11
+widgetsnbextension==4.0.13
     # via ipywidgets
+zipp==3.20.0
+    # via importlib-metadata
diff --git a/requirements/basetest.txt b/requirements/basetest.txt
index 29cc9ca2..050792e5 100644
--- a/requirements/basetest.txt
+++ b/requirements/basetest.txt
@@ -11,11 +11,11 @@ charset-normalizer==3.3.2
     # via requests
 exceptiongroup==1.2.2
     # via pytest
-idna==3.7
+idna==3.8
     # via requests
 iniconfig==2.0.0
     # via pytest
-numpy==2.0.1
+numpy==2.1.0
     # via
     #   -r basetest.in
     #   pandas
diff --git a/requirements/ci.txt b/requirements/ci.txt
index 6bb608a6..1b6bca87 100644
--- a/requirements/ci.txt
+++ b/requirements/ci.txt
@@ -5,7 +5,7 @@
 #
 #    pip-compile-multi
 #
-cachetools==5.4.0
+cachetools==5.5.0
     # via tox
 certifi==2024.7.4
     # via requests
@@ -25,7 +25,7 @@ gitdb==4.0.11
     # via gitpython
 gitpython==3.1.43
     # via -r ci.in
-idna==3.7
+idna==3.8
     # via requests
 packaging==24.1
     # via
@@ -48,7 +48,7 @@ tomli==2.0.1
     # via
     #   pyproject-api
     #   tox
-tox==4.17.1
+tox==4.18.0
     # via -r ci.in
 urllib3==2.2.2
     # via requests
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 4de7d28b..86600f55 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -115,11 +115,11 @@ terminado==0.18.1
     # via
     #   jupyter-server-terminals
 toposort==1.10
     # via pip-compile-multi
-types-python-dateutil==2.9.0.20240316
+types-python-dateutil==2.9.0.20240821
     # via arrow
 uri-template==1.3.0
     # via jsonschema
-webcolors==24.6.0
+webcolors==24.8.0
     # via jsonschema
 websocket-client==1.8.0
     # via jupyter-server
diff --git a/requirements/docs.txt b/requirements/docs.txt
index a7952ab0..740df743 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -14,7 +14,7 @@ attrs==24.2.0
     # via
     #   jsonschema
     #   referencing
-babel==2.15.0
+babel==2.16.0
     # via
     #   pydata-sphinx-theme
     #   sphinx
@@ -42,7 +42,7 @@ docutils==0.21.2
     #   sphinxcontrib-bibtex
 fastjsonschema==2.20.0
     # via nbformat
-idna==3.7
+idna==3.8
     # via requests
 imagesize==1.4.1
     # via sphinx
@@ -102,7 +102,7 @@ nbformat==5.10.4
     #   nbclient
     #   nbconvert
     #   nbsphinx
-nbsphinx==0.9.4
+nbsphinx==0.9.5
     # via -r docs.in
 nest-asyncio==1.6.0
     # via ipykernel
@@ -130,7 +130,7 @@ pydata-sphinx-theme==0.15.4
     # via -r docs.in
 pytz==2024.1
     # via pandas
-pyzmq==26.1.0
+pyzmq==26.2.0
     # via
     #   ipykernel
     #   jupyter-client
@@ -148,7 +148,7 @@ rpds-py==0.20.0
     #   referencing
 snowballstemmer==2.2.0
     # via sphinx
-soupsieve==2.5
+soupsieve==2.6
     # via beautifulsoup4
 sphinx==8.0.2
     # via
diff --git a/requirements/mypy.txt b/requirements/mypy.txt
index e0374a17..c5815809 100644
--- a/requirements/mypy.txt
+++ b/requirements/mypy.txt
@@ -6,7 +6,7 @@
 #    pip-compile-multi
 #
 -r test.txt
-mypy==1.11.1
+mypy==1.11.2
     # via -r mypy.in
 mypy-extensions==1.0.0
     # via mypy
diff --git a/requirements/nightly.txt b/requirements/nightly.txt
index f892b2c9..f9bcba28 100644
--- a/requirements/nightly.txt
+++ b/requirements/nightly.txt
@@ -20,7 +20,7 @@ cyclebane==24.6.0
     # via sciline
 cycler==0.12.1
     # via matplotlib
-dask==2024.8.0
+dask==2024.8.1
     # via -r nightly.in
 decorator==5.1.1
     # via ipython
@@ -38,25 +38,25 @@ h5py==3.11.0
     # via
     #   scippneutron
     #   scippnexus
-importlib-metadata==8.2.0
+importlib-metadata==8.4.0
     # via dask
 ipydatawidgets==4.3.5
     # via pythreejs
 ipython==8.26.0
     # via ipywidgets
-ipywidgets==8.1.3
+ipywidgets==8.1.5
     # via
     #   ipydatawidgets
     #   pythreejs
 jedi==0.19.1
     # via ipython
-jupyterlab-widgets==3.0.11
+jupyterlab-widgets==3.0.13
     # via ipywidgets
 kiwisolver==1.4.5
     # via matplotlib
 locket==1.0.0
     # via partd
-matplotlib==3.9.1.post1
+matplotlib==3.9.2
     # via
     #   mpltoolbox
     #   plopp
@@ -86,14 +86,16 @@ pure-eval==0.2.3
     # via stack-data
 pygments==2.18.0
     # via ipython
-pyparsing==3.1.2
+pyparsing==3.1.4
     # via matplotlib
 pythreejs==2.4.2
     # via -r nightly.in
 pyyaml==6.0.2
     # via dask
 sciline @ git+https://github.com/scipp/sciline@main
-    # via -r nightly.in
+    # via
+    #   -r nightly.in
+    #   essreduce
 scipp @ https://github.com/scipp/scipp/releases/download/nightly/scipp-nightly-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
     # via
     #   -r nightly.in
@@ -107,7 +109,7 @@ scippnexus @ git+https://github.com/scipp/scippnexus@main
     #   -r nightly.in
     #   essreduce
     #   scippneutron
-scipy==1.14.0
+scipy==1.14.1
     # via
     #   scippneutron
     #   scippnexus
@@ -131,7 +133,7 @@ typing-extensions==4.12.2
     # via ipython
 wcwidth==0.2.13
     # via prompt-toolkit
-widgetsnbextension==4.0.11
+widgetsnbextension==4.0.13
     # via ipywidgets
-zipp==3.19.2
+zipp==3.20.0
     # via importlib-metadata

From 25226c9692ab3aa6c586c741a385e0d60af5e685 Mon Sep 17 00:00:00 2001
From: Simon Heybrock
Date: Mon, 26 Aug 2024 13:19:46 +0200
Subject: [PATCH 8/8] Refactor notebooks

---
 docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
index cd6415ed..7864fe09 100644
--- a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
+++ b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
@@ -239,7 +239,7 @@
    "source": [
     "results = workflow.compute(\n",
     "    (\n",
-    "        ReducibleDetectorData[SampleRun],\n",
+    "        DetectorData[SampleRun],\n",
     "        MaskedData[SampleRun],\n",
     "        FilteredData[SampleRun],\n",
     "        FilteredData[VanadiumRun],\n",
@@ -254,7 +254,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "results[ReducibleDetectorData[SampleRun]]"
+    "results[DetectorData[SampleRun]]"
    ]
   },
   {