From f5cb9be54d939b6c536813a8537355b9b1557ddf Mon Sep 17 00:00:00 2001
From: Johannes Kasimir
Date: Tue, 17 Dec 2024 16:30:33 +0100
Subject: [PATCH 1/6] feat: proton current correction

---
 src/ess/amor/load.py                 | 20 +++++++++++++++++--
 src/ess/amor/workflow.py             | 17 ++++++++++++++--
 src/ess/reflectometry/conversions.py | 30 ++++++++++++++++++++++++++++
 src/ess/reflectometry/corrections.py |  5 +++++
 src/ess/reflectometry/types.py       |  4 ++++
 5 files changed, 72 insertions(+), 4 deletions(-)

diff --git a/src/ess/amor/load.py b/src/ess/amor/load.py
index 9737e78b..5705ee77 100644
--- a/src/ess/amor/load.py
+++ b/src/ess/amor/load.py
@@ -9,6 +9,7 @@
     Filename,
     LoadedNeXusDetector,
     NeXusDetectorName,
+    ProtonCurrent,
     RawDetectorData,
     RunType,
     SampleRotation,
@@ -43,10 +44,15 @@ def load_events(
     beam_size: BeamSize[RunType],
     angle_to_center_of_beam: AngleCenterOfIncomingToHorizon[RunType],
 ) -> RawDetectorData[RunType]:
+    event_data = detector["data"]
+    if 'event_time_zero' in event_data.coords:
+        event_data.bins.coords['event_time_zero'] = sc.bins_like(
+            event_data, fill_value=event_data.coords['event_time_zero']
+        )
+
     detector_numbers = pixel_coordinates_in_detector_system()
     data = (
-        detector["data"]
-        .bins.constituents["data"]
+        event_data.bins.constituents["data"]
         .group(detector_numbers.data.flatten(to='event_id'))
         .fold("event_id", sizes=detector_numbers.sizes)
     )
@@ -126,6 +132,15 @@ def load_amor_angle_from_horizon_to_center_of_incident_beam(
     )
 
 
+def load_amor_proton_current(
+    fp: Filename[RunType],
+) -> ProtonCurrent[RunType]:
+    (pc,) = load_nx(fp, 'NXentry/NXinstrument/NXdetector/proton_current')
+    pc = pc['value']['dim_1', 0]
+    pc.data.unit = 'mA/s'
+    return pc
+
+
 providers = (
     load_detector,
     load_events,
@@ -136,5 +151,6 @@
     load_amor_sample_rotation,
     load_amor_detector_rotation,
     load_amor_angle_from_horizon_to_center_of_incident_beam,
+    load_amor_proton_current,
     amor_chopper,
 )
diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py
index f1cbf997..e87bb23c 100644
--- a/src/ess/amor/workflow.py
+++ b/src/ess/amor/workflow.py
@@ -1,6 +1,11 @@
-from ..reflectometry.corrections import correct_by_footprint
+from ..reflectometry.conversions import (
+    add_proton_current_coord,
+    add_proton_current_mask,
+)
+from ..reflectometry.corrections import correct_by_footprint, correct_by_proton_current
 from ..reflectometry.types import (
     BeamDivergenceLimits,
+    ProtonCurrent,
     RawDetectorData,
     ReducibleData,
     RunType,
@@ -18,6 +23,7 @@ def add_coords_masks_and_apply_corrections(
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
     wbins: WavelengthBins,
+    proton_current: ProtonCurrent[RunType],
     graph: CoordTransformationGraph,
 ) -> ReducibleData[RunType]:
     """
@@ -27,7 +33,14 @@
     da = add_coords(da, graph)
     da = add_masks(da, ylim, zlims, bdlim, wbins)
     correct_by_footprint(da)
-    return da
+
+    # For some older Amor files there are no entries in the proton current log
+    if len(proton_current) != 0:
+        add_proton_current_coord(da, proton_current)
+        add_proton_current_mask(da)
+        correct_by_proton_current(da)
+
+    return ReducibleData[RunType](da)
 
 
 providers = (add_coords_masks_and_apply_corrections,)
diff --git a/src/ess/reflectometry/conversions.py b/src/ess/reflectometry/conversions.py
index 8e5dca25..f05c4ccd 100644
--- a/src/ess/reflectometry/conversions.py
+++ b/src/ess/reflectometry/conversions.py
@@ -4,6 +4,8 @@
 from scipp.constants import pi
 from scippneutron._utils import elem_dtype
 
+from .types import ProtonCurrent, RunType
+
 
 def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
     """
@@ -26,4 +28,32 @@ def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
     return c * sc.sin(theta.astype(dtype, copy=False)) / wavelength
 
 
+def add_proton_current_coord(
+    da,
+    pc: ProtonCurrent[RunType],
+):
+    """Find the proton current value for each event and
+    adds it as a coord to the data array."""
+    pc_lookup = sc.lookup(
+        pc,
+        dim='time',
+        mode='previous',
+        fill_value=sc.scalar(float('nan'), unit=pc.unit),
+    )
+    # Useful for comparing the proton current to what is typical
+    da.coords['median_proton_current'] = sc.median(pc).data
+    da.coords.set_aligned('median_proton_current', False)
+    da.bins.coords['proton_current'] = pc_lookup(da.bins.coords['event_time_zero'])
+
+
+def add_proton_current_mask(da: sc.DataArray):
+    """Masks events where the proton current was too low or where
+    the proton current is nan."""
+    # Take inverse and use >= because we want to mask nan values
+    da.bins.masks['proton_current_too_low'] = ~(
+        da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4
+    )
+    return da
+
+
 providers = ()
diff --git a/src/ess/reflectometry/corrections.py b/src/ess/reflectometry/corrections.py
index 0cd8716a..01f6578f 100644
--- a/src/ess/reflectometry/corrections.py
+++ b/src/ess/reflectometry/corrections.py
@@ -37,3 +37,8 @@ def correct_by_footprint(da: sc.DataArray) -> None:
         da.coords['beam_size'],
         da.coords['sample_size'],
     )
+
+
+def correct_by_proton_current(da: sc.DataArray) -> None:
+    "Corrects the data by the proton current during the time of data collection"
+    da /= da.bins.coords['proton_current']
diff --git a/src/ess/reflectometry/types.py b/src/ess/reflectometry/types.py
index dd31d465..03b93d36 100644
--- a/src/ess/reflectometry/types.py
+++ b/src/ess/reflectometry/types.py
@@ -93,6 +93,10 @@ class SampleSize(sciline.Scope[RunType, sc.Variable], sc.Variable):
     """Size of the sample. If None it is assumed to be the same as the reference."""
 
 
+class ProtonCurrent(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
+    """Proton current log from file"""
+
+
 YIndexLimits = NewType("YIndexLimits", tuple[sc.Variable, sc.Variable])
 """Limit of the (logical) 'y' detector pixel index"""
 
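
[Editor's note -- not part of the patch series.] Patch 1 introduces the core mechanism: a 'previous-neighbour' lookup from the proton-current log onto each event's event_time_zero, a mask for events recorded while the current was below a quarter of the median (or is unknown), and an event-wise division by the current. The sketch below illustrates that behaviour on a small, made-up, non-binned event table with dimensionless values; in the patch the same operations run per event through da.bins.coords and da.bins.masks.

    import scipp as sc

    # Made-up proton-current log: one value per timestamp
    log = sc.DataArray(
        sc.array(dims=['time'], values=[2.0, 4.0, 0.1]),
        coords={
            'time': sc.datetimes(
                dims=['time'],
                values=['2024-01-01T00:00:00', '2024-01-01T00:01:00', '2024-01-01T00:02:00'],
                unit='s',
            )
        },
    )

    # Made-up events; the last one predates the log and receives NaN from the lookup
    events = sc.DataArray(
        sc.ones(dims=['event'], shape=[4]),
        coords={
            'event_time_zero': sc.datetimes(
                dims=['event'],
                values=[
                    '2024-01-01T00:00:30',
                    '2024-01-01T00:01:30',
                    '2024-01-01T00:02:30',
                    '2023-12-31T23:59:00',
                ],
                unit='s',
            )
        },
    )

    # 'previous' mode picks the most recent log entry at or before each event time
    lookup = sc.lookup(
        log, dim='time', mode='previous', fill_value=sc.scalar(float('nan'))
    )
    events.coords['proton_current'] = lookup(events.coords['event_time_zero'])

    # ~(x >= threshold) masks both low currents and NaN (comparisons with NaN are False)
    threshold = sc.median(log).data / 4
    events.masks['low_current'] = ~(events.coords['proton_current'] >= threshold)
    events /= events.coords['proton_current']

    print(events.coords['proton_current'].values)  # [2.0, 4.0, 0.1, nan]
    print(events.masks['low_current'].values)      # [False, False, True, True]
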

From db560477388156d3b412a3bad267a2920e674f12 Mon Sep 17 00:00:00 2001
From: Johannes Kasimir
Date: Mon, 27 Jan 2025 15:01:34 +0100
Subject: [PATCH 2/6] docs: clarify that functions modify input inplace

---
 src/ess/amor/conversions.py          | 3 +--
 src/ess/amor/workflow.py             | 2 +-
 src/ess/reflectometry/conversions.py | 5 ++---
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/ess/amor/conversions.py b/src/ess/amor/conversions.py
index 17fcc302..32e5058a 100644
--- a/src/ess/amor/conversions.py
+++ b/src/ess/amor/conversions.py
@@ -149,7 +149,7 @@ def add_masks(
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
     wbins: WavelengthBins,
-):
+) -> None:
     """
     Masks the data by ranges in the detector
     coordinates ``z`` and ``y``, and by the divergence of the beam,
@@ -167,7 +167,6 @@
         wbins[0],
         wbins[-1],
     )
-    return da
 
 
 providers = (coordinate_transformation_graph,)
diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py
index e87bb23c..e6884b56 100644
--- a/src/ess/amor/workflow.py
+++ b/src/ess/amor/workflow.py
@@ -31,7 +31,7 @@ def add_coords_masks_and_apply_corrections(
     the same for the sample measurement and the reference measurement.
     """
     da = add_coords(da, graph)
-    da = add_masks(da, ylim, zlims, bdlim, wbins)
+    add_masks(da, ylim, zlims, bdlim, wbins)
     correct_by_footprint(da)
 
     # For some older Amor files there are no entries in the proton current log
diff --git a/src/ess/reflectometry/conversions.py b/src/ess/reflectometry/conversions.py
index f05c4ccd..c25cdd8d 100644
--- a/src/ess/reflectometry/conversions.py
+++ b/src/ess/reflectometry/conversions.py
@@ -31,7 +31,7 @@ def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
 def add_proton_current_coord(
     da,
     pc: ProtonCurrent[RunType],
-):
+) -> None:
     """Find the proton current value for each event and
     adds it as a coord to the data array."""
     pc_lookup = sc.lookup(
@@ -46,14 +46,13 @@ def add_proton_current_coord(
     da.bins.coords['proton_current'] = pc_lookup(da.bins.coords['event_time_zero'])
 
 
-def add_proton_current_mask(da: sc.DataArray):
+def add_proton_current_mask(da: sc.DataArray) -> None:
     """Masks events where the proton current was too low or where
     the proton current is nan."""
     # Take inverse and use >= because we want to mask nan values
     da.bins.masks['proton_current_too_low'] = ~(
         da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4
     )
-    return da
 
 
 providers = ()
""" da = add_coords(da, graph) - da = add_masks(da, ylim, zlims, bdlim, wbins) + add_masks(da, ylim, zlims, bdlim, wbins) correct_by_footprint(da) # For some older Amor files there are no entries in the proton current log diff --git a/src/ess/reflectometry/conversions.py b/src/ess/reflectometry/conversions.py index f05c4ccd..c25cdd8d 100644 --- a/src/ess/reflectometry/conversions.py +++ b/src/ess/reflectometry/conversions.py @@ -31,7 +31,7 @@ def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable: def add_proton_current_coord( da, pc: ProtonCurrent[RunType], -): +) -> None: """Find the proton current value for each event and adds it as a coord to the data array.""" pc_lookup = sc.lookup( @@ -46,14 +46,13 @@ def add_proton_current_coord( da.bins.coords['proton_current'] = pc_lookup(da.bins.coords['event_time_zero']) -def add_proton_current_mask(da: sc.DataArray): +def add_proton_current_mask(da: sc.DataArray) -> None: """Masks events where the proton current was too low or where the proton current is nan.""" # Take inverse and use >= because we want to mask nan values da.bins.masks['proton_current_too_low'] = ~( da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4 ) - return da providers = () From 68b9c6f86d3de8cd6e96026c282b9783aad8cc35 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 27 Jan 2025 15:53:17 +0100 Subject: [PATCH 3/6] test: proton current is added --- tests/amor/pipeline_test.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/amor/pipeline_test.py b/tests/amor/pipeline_test.py index c007c35f..a4e88097 100644 --- a/tests/amor/pipeline_test.py +++ b/tests/amor/pipeline_test.py @@ -14,7 +14,9 @@ from ess.reflectometry import orso from ess.reflectometry.types import ( Filename, + ProtonCurrent, QBins, + ReducibleData, ReferenceRun, ReflectivityOverQ, SampleRotation, @@ -127,3 +129,32 @@ def test_pipeline_merging_events_result_unchanged(amor_pipeline: sciline.Pipelin assert_allclose( 2 * sc.variances(result.data), sc.variances(result2.data), rtol=sc.scalar(1e-6) ) + + +@pytest.mark.filterwarnings("ignore:Failed to convert .* into a transformation") +@pytest.mark.filterwarnings("ignore:Invalid transformation, missing attribute") +def test_proton_current(amor_pipeline: sciline.Pipeline): + # The sample rotation value in the file is slightly off, so we set it manually + amor_pipeline[SampleRotation[SampleRun]] = sc.scalar(0.85, unit="deg") + amor_pipeline[Filename[SampleRun]] = amor.data.amor_sample_run(611) + da_without_proton_current = amor_pipeline.compute(ReducibleData[SampleRun]) + amor_pipeline[ProtonCurrent[SampleRun]] = sc.DataArray( + sc.array(dims=['time'], values=[1, 2, 0.1]), + coords={ + 'time': sc.array( + dims=['time'], + values=[1699883542349602112, 1699883542349602112, 1699886864071691036], + dtype='datetime64', + unit='ns', + ) + }, + ) + da_with_proton_current = amor_pipeline.compute(ReducibleData[SampleRun]) + + assert "proton_current" in da_with_proton_current.bins.coords + assert "proton_current_too_low" in da_with_proton_current.bins.masks + assert da_with_proton_current.bins.masks["proton_current_too_low"].any() + assert not da_with_proton_current.bins.masks["proton_current_too_low"].all() + + assert "proton_current" not in da_without_proton_current.bins.coords + assert "proton_current_too_low" not in da_without_proton_current.bins.masks From cc610e4d140d23ce2d22d3cd6353859b08e14879 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 27 Jan 2025 16:10:48 

From cc610e4d140d23ce2d22d3cd6353859b08e14879 Mon Sep 17 00:00:00 2001
From: Johannes Kasimir
Date: Mon, 27 Jan 2025 16:10:48 +0100
Subject: [PATCH 4/6] test: correction value is expected

---
 tests/amor/pipeline_test.py | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/tests/amor/pipeline_test.py b/tests/amor/pipeline_test.py
index a4e88097..ceaaa594 100644
--- a/tests/amor/pipeline_test.py
+++ b/tests/amor/pipeline_test.py
@@ -134,16 +134,17 @@ def test_pipeline_merging_events_result_unchanged(amor_pipeline: sciline.Pipelin
 @pytest.mark.filterwarnings("ignore:Failed to convert .* into a transformation")
 @pytest.mark.filterwarnings("ignore:Invalid transformation, missing attribute")
 def test_proton_current(amor_pipeline: sciline.Pipeline):
-    # The sample rotation value in the file is slightly off, so we set it manually
-    amor_pipeline[SampleRotation[SampleRun]] = sc.scalar(0.85, unit="deg")
     amor_pipeline[Filename[SampleRun]] = amor.data.amor_sample_run(611)
     da_without_proton_current = amor_pipeline.compute(ReducibleData[SampleRun])
+
+    proton_current = [1, 2, 0.1]
+    timestamps = [1699883542349602112, 1699883542349602112, 1699886864071691036]
     amor_pipeline[ProtonCurrent[SampleRun]] = sc.DataArray(
-        sc.array(dims=['time'], values=[1, 2, 0.1]),
+        sc.array(dims=['time'], values=proton_current),
         coords={
             'time': sc.array(
                 dims=['time'],
-                values=[1699883542349602112, 1699883542349602112, 1699886864071691036],
+                values=timestamps,
                 dtype='datetime64',
                 unit='ns',
             )
@@ -158,3 +159,14 @@ def test_proton_current(amor_pipeline: sciline.Pipeline):
 
     assert "proton_current" not in da_without_proton_current.bins.coords
     assert "proton_current_too_low" not in da_without_proton_current.bins.masks
+
+    t = (
+        da_with_proton_current.bins.constituents['data']
+        .coords['event_time_zero'][0]
+        .value.astype('uint64')
+    )
+    w_with = da_with_proton_current.bins.constituents['data'].data[0].value
+    w_without = da_without_proton_current.bins.constituents['data'].data[0].value
+    np.testing.assert_allclose(
+        proton_current[np.searchsorted(timestamps, t) - 1], w_without / w_with
+    )
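
[Editor's note -- not part of the patch series.] The new assertion rebuilds the expected weight by hand: for an event at time t, np.searchsorted(timestamps, t) - 1 is the index of the most recent log entry at or before t, the same 'previous' rule that sc.lookup applies in the correction, so the ratio of uncorrected to corrected weight must equal that log value. A small arithmetic check using the test's values and a hypothetical event time:

    import numpy as np

    proton_current = [1, 2, 0.1]
    timestamps = [1699883542349602112, 1699883542349602112, 1699886864071691036]

    t = 1699884000000000000  # hypothetical event_time_zero between the 2nd and 3rd entries
    i = np.searchsorted(timestamps, t) - 1
    assert i == 1
    assert proton_current[i] == 2  # so w_without / w_with should be 2 for such an event
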

From e23a039986e821f4e6b4f7376ee8128925877f33 Mon Sep 17 00:00:00 2001
From: Johannes Kasimir
Date: Mon, 3 Feb 2025 15:12:06 +0100
Subject: [PATCH 5/6] fix: avoid mutating input

---
 src/ess/amor/workflow.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py
index e6884b56..1d24d922 100644
--- a/src/ess/amor/workflow.py
+++ b/src/ess/amor/workflow.py
@@ -32,6 +32,9 @@ def add_coords_masks_and_apply_corrections(
     """
     da = add_coords(da, graph)
     add_masks(da, ylim, zlims, bdlim, wbins)
+
+    # Copy before applying corrections
+    da.data = da.data.copy()
     correct_by_footprint(da)
 
     # For some older Amor files there are no entries in the proton current log
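
[Editor's note -- not part of the patch series.] Patch 5 copies da.data before the corrections because the in-place division in correct_by_footprint / correct_by_proton_current would otherwise write through to the detector data handed to the provider, e.g. a value cached and reused by the pipeline. A minimal sketch of that aliasing, assuming scipp's shallow-copy semantics (toy values, not Amor data):

    import scipp as sc

    cached = sc.DataArray(sc.array(dims=['event'], values=[1.0, 1.0, 1.0]))

    view = cached.copy(deep=False)   # shares the underlying buffer, like a reused input
    view /= 2                        # in-place division also changes `cached`
    print(cached.values)             # [0.5, 0.5, 0.5] -- the cached input was mutated

    cached = sc.DataArray(sc.array(dims=['event'], values=[1.0, 1.0, 1.0]))
    view = cached.copy(deep=False)
    view.data = view.data.copy()     # what the patch does: detach the buffer first
    view /= 2
    print(cached.values)             # [1.0, 1.0, 1.0] -- the input is left intact

Patch 6 then removes the need for this copy altogether by switching to non-mutating operations.
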

From 12d2125881d2b3a66f7b4cb2cfcb5b27d6699055 Mon Sep 17 00:00:00 2001
From: Johannes Kasimir
Date: Tue, 4 Feb 2025 09:14:42 +0100
Subject: [PATCH 6/6] fix: remove inplace changes

---
 src/ess/amor/conversions.py          | 27 ++++++++++++++++-----------
 src/ess/amor/workflow.py             | 12 +++++-------
 src/ess/reflectometry/conversions.py | 20 +++++++++++++-------
 src/ess/reflectometry/corrections.py |  8 ++++----
 4 files changed, 38 insertions(+), 29 deletions(-)

diff --git a/src/ess/amor/conversions.py b/src/ess/amor/conversions.py
index 32e5058a..b97ce0d0 100644
--- a/src/ess/amor/conversions.py
+++ b/src/ess/amor/conversions.py
@@ -149,24 +149,29 @@ def add_masks(
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
     wbins: WavelengthBins,
-) -> None:
+) -> sc.DataArray:
     """
     Masks the data by ranges in the detector
     coordinates ``z`` and ``y``, and by the divergence of the beam,
     and by wavelength.
     """
-    da.masks["stripe_range"] = _not_between(da.coords["stripe"], *ylim)
-    da.masks['z_range'] = _not_between(da.coords["z_index"], *zlims)
-    da.bins.masks["divergence_too_large"] = _not_between(
-        da.bins.coords["angle_of_divergence"],
-        bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
-        bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+    da = da.assign_masks(
+        stripe_range=_not_between(da.coords["stripe"], *ylim),
+        z_range=_not_between(da.coords["z_index"], *zlims),
     )
-    da.bins.masks['wavelength'] = _not_between(
-        da.bins.coords['wavelength'],
-        wbins[0],
-        wbins[-1],
+    da = da.bins.assign_masks(
+        divergence_too_large=_not_between(
+            da.bins.coords["angle_of_divergence"],
+            bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+            bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+        ),
+        wavelength=_not_between(
+            da.bins.coords['wavelength'],
+            wbins[0],
+            wbins[-1],
+        ),
     )
+    return da
 
 
 providers = (coordinate_transformation_graph,)
diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py
index 1d24d922..974437e3 100644
--- a/src/ess/amor/workflow.py
+++ b/src/ess/amor/workflow.py
@@ -31,17 +31,15 @@ def add_coords_masks_and_apply_corrections(
     the same for the sample measurement and the reference measurement.
     """
     da = add_coords(da, graph)
-    add_masks(da, ylim, zlims, bdlim, wbins)
+    da = add_masks(da, ylim, zlims, bdlim, wbins)
 
-    # Copy before applying corrections
-    da.data = da.data.copy()
-    correct_by_footprint(da)
+    da = correct_by_footprint(da)
 
     # For some older Amor files there are no entries in the proton current log
     if len(proton_current) != 0:
-        add_proton_current_coord(da, proton_current)
-        add_proton_current_mask(da)
-        correct_by_proton_current(da)
+        da = add_proton_current_coord(da, proton_current)
+        da = add_proton_current_mask(da)
+        da = correct_by_proton_current(da)
 
     return ReducibleData[RunType](da)
 
diff --git a/src/ess/reflectometry/conversions.py b/src/ess/reflectometry/conversions.py
index c25cdd8d..5a3d576b 100644
--- a/src/ess/reflectometry/conversions.py
+++ b/src/ess/reflectometry/conversions.py
@@ -29,9 +29,9 @@ def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
 
 
 def add_proton_current_coord(
-    da,
+    da: sc.DataArray,
     pc: ProtonCurrent[RunType],
-) -> None:
+) -> sc.DataArray:
     """Find the proton current value for each event and
     adds it as a coord to the data array."""
     pc_lookup = sc.lookup(
@@ -41,18 +41,24 @@ def add_proton_current_coord(
         fill_value=sc.scalar(float('nan'), unit=pc.unit),
     )
     # Useful for comparing the proton current to what is typical
-    da.coords['median_proton_current'] = sc.median(pc).data
+    da = da.assign_coords(median_proton_current=sc.median(pc).data)
     da.coords.set_aligned('median_proton_current', False)
-    da.bins.coords['proton_current'] = pc_lookup(da.bins.coords['event_time_zero'])
+    da = da.bins.assign_coords(
+        proton_current=pc_lookup(da.bins.coords['event_time_zero'])
+    )
+    return da
 
 
-def add_proton_current_mask(da: sc.DataArray) -> None:
+def add_proton_current_mask(da: sc.DataArray) -> sc.DataArray:
     """Masks events where the proton current was too low or where
     the proton current is nan."""
     # Take inverse and use >= because we want to mask nan values
-    da.bins.masks['proton_current_too_low'] = ~(
-        da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4
+    da = da.bins.assign_masks(
+        proton_current_too_low=~(
+            da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4
+        )
     )
+    return da
 
 
 providers = ()
diff --git a/src/ess/reflectometry/corrections.py b/src/ess/reflectometry/corrections.py
index 01f6578f..2906b244 100644
--- a/src/ess/reflectometry/corrections.py
+++ b/src/ess/reflectometry/corrections.py
@@ -30,15 +30,15 @@ def footprint_on_sample(
     return sc.erf(fwhm_to_std(sample_size / size_of_beam_on_sample))
 
 
-def correct_by_footprint(da: sc.DataArray) -> None:
+def correct_by_footprint(da: sc.DataArray) -> sc.DataArray:
     "Corrects the data by the size of the footprint on the sample."
-    da /= footprint_on_sample(
+    return da / footprint_on_sample(
         da.bins.coords['theta'],
         da.coords['beam_size'],
         da.coords['sample_size'],
     )
 
 
-def correct_by_proton_current(da: sc.DataArray) -> None:
+def correct_by_proton_current(da: sc.DataArray) -> sc.DataArray:
     "Corrects the data by the proton current during the time of data collection"
-    da /= da.bins.coords['proton_current']
+    return da / da.bins.coords['proton_current']
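
[Editor's note -- not part of the patch series.] The final patch replaces the in-place mask/coord assignments and in-place divisions with expression-style scipp APIs (assign_masks, assign_coords, da.bins.assign_*, plain division), so each helper returns a new DataArray and the defensive copy from patch 5 is no longer needed. A minimal sketch of the pattern, with toy values:

    import scipp as sc

    original = sc.DataArray(sc.array(dims=['x'], values=[1.0, 2.0, 3.0]))

    # assign_masks returns a new DataArray carrying the mask; `original` is untouched
    masked = original.assign_masks(too_big=original.data > sc.scalar(2.0))
    assert 'too_big' in masked.masks and 'too_big' not in original.masks

    # Scaling corrections likewise build a new array instead of dividing in place
    corrected = masked / sc.scalar(2.0)
    assert sc.identical(original.data, sc.array(dims=['x'], values=[1.0, 2.0, 3.0]))

The same pattern applies per event on binned data through da.bins.assign_coords and da.bins.assign_masks, which is what add_proton_current_coord and add_proton_current_mask use after this patch.
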