From 08fbb8efffe0b1b4bb1e19b416e61966ca2e1f82 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Tue, 21 Nov 2023 10:58:29 +0100 Subject: [PATCH 1/2] Use load_with_mantid instead of load --- docs/examples/preprocess_files.ipynb | 71 +++++++++------------------- 1 file changed, 23 insertions(+), 48 deletions(-) diff --git a/docs/examples/preprocess_files.ipynb b/docs/examples/preprocess_files.ipynb index bf565434..4cd44b42 100644 --- a/docs/examples/preprocess_files.ipynb +++ b/docs/examples/preprocess_files.ipynb @@ -21,25 +21,8 @@ "source": [ "import scipp as sc\n", "import scippneutron as scn\n", - "import plopp as pp\n", "\n", - "import ess\n", - "from ess.diffraction.external import load_calibration\n", - "from ess.diffraction import powder\n", - "from ess import diffraction\n", - "from ess.external import powgen" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c88d48df-272a-4f83-8b4a-43450a365b1f", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "ess.logging.configure_workflow('powgen_preprocess', filename=None)" + "from ess.diffraction.external import load_calibration, powgen" ] }, { @@ -57,10 +40,11 @@ "metadata": {}, "outputs": [], "source": [ - "sample = scn.load(powgen.data.mantid_sample_file(),\n", - " advanced_geometry=True,\n", - " load_pulse_times=False,\n", - " mantid_args={\"LoadMonitors\": False})" + "sample = scn.load_with_mantid(\n", + " powgen.data.mantid_sample_file(),\n", + " advanced_geometry=True,\n", + " load_pulse_times=False,\n", + " mantid_args={\"LoadMonitors\": False})" ] }, { @@ -80,12 +64,12 @@ "metadata": {}, "outputs": [], "source": [ - "sample.coords['gd_prtn_chrg'] = sample.attrs.pop('gd_prtn_chrg')\n", - "sample.coords.set_aligned('gd_prtn_chrg', False)\n", - "sample.attrs.clear()\n", + "sample_data = sample['data']\n", + "sample_data.coords['gd_prtn_chrg'] = sample['gd_prtn_chrg']\n", + "sample_data.coords.set_aligned('gd_prtn_chrg', False)\n", "sample_dg = sc.DataGroup({\n", - " 'data': sample,\n", - " 'detector_info': sample.coords.pop('detector_info').value,\n", + " 'data': sample_data,\n", + " 'detector_info': sample_data.coords.pop('detector_info').value\n", "})" ] }, @@ -134,10 +118,11 @@ "metadata": {}, "outputs": [], "source": [ - "vana = scn.load(powgen.data.mantid_vanadium_file(),\n", - " advanced_geometry=False,\n", - " load_pulse_times=True,\n", - " mantid_args={\"LoadMonitors\": False})" + "vana = scn.load_with_mantid(\n", + " powgen.data.mantid_vanadium_file(),\n", + " advanced_geometry=False,\n", + " load_pulse_times=True,\n", + " mantid_args={\"LoadMonitors\": False})" ] }, { @@ -147,13 +132,13 @@ "metadata": {}, "outputs": [], "source": [ - "vana.coords['gd_prtn_chrg'] = vana.attrs.pop('gd_prtn_chrg')\n", - "vana.coords.set_aligned('gd_prtn_chrg', False)\n", + "vana_data = vana['data']\n", + "vana_data.coords['gd_prtn_chrg'] = vana['gd_prtn_chrg']\n", + "vana_data.coords.set_aligned('gd_prtn_chrg', False)\n", "vana_dg = sc.DataGroup({\n", - " 'data': vana,\n", - " 'proton_charge': vana.attrs.pop('proton_charge').value.rename(time='pulse_time')\n", - "})\n", - "vana.attrs.clear()" + " 'data': vana_data,\n", + " 'proton_charge': vana['proton_charge'].rename(time='pulse_time')\n", + "})" ] }, { @@ -176,16 +161,6 @@ "vana_dg['data'].bins.constituents['data']" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "ec0493a6-1b1f-42da-bc05-f5f511a95866", - "metadata": {}, - "outputs": [], - "source": [ - "vana_dg['data'].bins.constituents['data'].coords['tof']" - ] - }, { "cell_type": 
"code", "execution_count": null, @@ -274,7 +249,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.17" + "version": "3.10.13" } }, "nbformat": 4, From e850361c3c7f7ed1c0af0a352b4c5c3442acb30a Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Tue, 21 Nov 2023 11:08:48 +0100 Subject: [PATCH 2/2] Avoid uses of attrs and meta --- pyproject.toml | 1 - src/ess/diffraction/corrections.py | 18 ++++----- .../external/powgen/instrument_view.py | 2 +- src/ess/diffraction/grouping.py | 4 +- src/ess/diffraction/powder/conversions.py | 11 +----- src/ess/diffraction/powder/corrections.py | 6 +-- tests/diffraction/filtering_test.py | 37 ++++++++++--------- tests/diffraction/powder/corrections_test.py | 10 ++--- 8 files changed, 40 insertions(+), 49 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bcff7d56..1fe9e4f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,6 @@ testpaths = "tests" filterwarnings = [ "error", 'ignore:\n Sentinel is not a public part of the traitlets API:DeprecationWarning', - 'ignore:sc.DataArray.attrs has been deprecated:scipp.VisibleDeprecationWarning' ] [tool.bandit] diff --git a/src/ess/diffraction/corrections.py b/src/ess/diffraction/corrections.py index af870fc5..2fe51576 100644 --- a/src/ess/diffraction/corrections.py +++ b/src/ess/diffraction/corrections.py @@ -12,7 +12,7 @@ def normalize_by_monitor( data: sc.DataArray, *, - monitor: str, + monitor: sc.DataArray, wavelength_edges: Optional[sc.Variable] = None, smooth_args: Optional[Dict[str, Any]] = None, ) -> sc.DataArray: @@ -26,7 +26,7 @@ def normalize_by_monitor( data: Input event data. monitor: - Name of a histogrammed monitor. Must be stored as metadata in `data`. + A histogrammed monitor. wavelength_edges: If given, rebin the monitor with these edges. smooth_args: @@ -40,9 +40,8 @@ def normalize_by_monitor( : `data` normalized by a monitor. 
""" - mon = data.meta[monitor].value - if 'wavelength' not in mon.coords: - mon = mon.transform_coords( + if 'wavelength' not in monitor.coords: + monitor = monitor.transform_coords( 'wavelength', graph={**beamline.beamline(scatter=False), **tof.elastic("tof")}, keep_inputs=False, @@ -51,16 +50,15 @@ def normalize_by_monitor( ) if wavelength_edges is not None: - mon = mon.rebin(wavelength=wavelength_edges) + monitor = monitor.rebin(wavelength=wavelength_edges) if smooth_args is not None: get_logger().info( - "Smoothing monitor '%s' for normalization using " + "Smoothing monitor for normalization using " "ess.diffraction.smoothing.lowpass with %s.", - monitor, smooth_args, ) - mon = lowpass(mon, dim='wavelength', **smooth_args) - return data.bins / sc.lookup(func=mon, dim='wavelength') + monitor = lowpass(monitor, dim='wavelength', **smooth_args) + return data.bins / sc.lookup(func=monitor, dim='wavelength') def normalize_by_vanadium( diff --git a/src/ess/diffraction/external/powgen/instrument_view.py b/src/ess/diffraction/external/powgen/instrument_view.py index bb0180c5..15002c59 100644 --- a/src/ess/diffraction/external/powgen/instrument_view.py +++ b/src/ess/diffraction/external/powgen/instrument_view.py @@ -19,7 +19,7 @@ def instrument_view( Parameters ---------- positions: - Key for coord/attr holding positions to use for pixels + Key for coord holding positions to use for pixels pixel_size: Custom pixel size to use for detector pixels components: diff --git a/src/ess/diffraction/grouping.py b/src/ess/diffraction/grouping.py index c8199257..5f18b7ae 100644 --- a/src/ess/diffraction/grouping.py +++ b/src/ess/diffraction/grouping.py @@ -11,8 +11,8 @@ def group_by_two_theta(data: sc.DataArray, *, edges: sc.Variable) -> sc.DataArra Parameters ---------- data: - Input data array with events. Must contain a coord or attr called 'two_theta' - or coords or attrs that can be used to compute it. + Input data array with events. Must contain a coord called 'two_theta' + or coords that can be used to compute it. edges: Bin edges in two_theta. `data` is grouped into those bins. diff --git a/src/ess/diffraction/powder/conversions.py b/src/ess/diffraction/powder/conversions.py index c7e0ca0d..ff2cbf9b 100644 --- a/src/ess/diffraction/powder/conversions.py +++ b/src/ess/diffraction/powder/conversions.py @@ -111,13 +111,13 @@ def to_dspacing_with_calibration( Raises ------ KeyError - If `data` does not contain a 'tof' metadata. + If `data` does not contain a 'tof' coordinate. Parameters ---------- data: Input data in tof or wavelength dimension. - Must have a tof coordinate or attribute. + Must have a tof coordinate. calibration: Calibration data. If given, use it for the conversion. Otherwise, the calibration data must be stored in `data`. @@ -146,13 +146,6 @@ def to_dspacing_with_calibration( else: out.coords['_tag_positions_consumed'] = sc.scalar(0) - # TODO: The need for attribute popping is a side-effect from using the deprecated - # scippneutron.load() function. Once we switch to using `load_with_mantid`, we - # should be able to remove this. 
- for key in ('difc', 'difa', 'tzero'): - if key not in out.coords: - out.coords[key] = out.attrs.pop(key) - out = out.transform_coords('dspacing', graph=graph, keep_intermediate=False) out.coords.pop('_tag_positions_consumed', None) return out diff --git a/src/ess/diffraction/powder/corrections.py b/src/ess/diffraction/powder/corrections.py index c9fcffdb..222c8b39 100644 --- a/src/ess/diffraction/powder/corrections.py +++ b/src/ess/diffraction/powder/corrections.py @@ -5,7 +5,7 @@ def merge_calibration(*, into: sc.DataArray, calibration: sc.Dataset) -> sc.DataArray: """ - Return a scipp.DataArray containing calibration metadata. + Return a scipp.DataArray containing calibration metadata as coordinates. Parameters ---------- @@ -31,12 +31,12 @@ def merge_calibration(*, into: sc.DataArray, calibration: sc.Dataset) -> sc.Data ) out = into.copy(deep=False) for name in ('difa', 'difc', 'tzero'): - if name in out.meta: + if name in out.coords: raise ValueError( f"Cannot add calibration parameter '{name}' to data, " "there already is metadata with the same name." ) - out.attrs[name] = calibration[name].data + out.coords[name] = calibration[name].data if 'calibration' in out.masks: raise ValueError( "Cannot add calibration mask 'calibration' tp data, " diff --git a/tests/diffraction/filtering_test.py b/tests/diffraction/filtering_test.py index 6f13385f..20de49a7 100644 --- a/tests/diffraction/filtering_test.py +++ b/tests/diffraction/filtering_test.py @@ -1,13 +1,16 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) # @author Jan-Lukas Wynen + +from typing import Tuple + import numpy as np import scipp as sc from ess.diffraction import filtering -def make_data_with_pulse_time(rng, n_event): +def make_data_with_pulse_time(rng, n_event) -> sc.DataArray: start_time = sc.scalar(np.datetime64('2022-03-14T14:42:37.12345', 'ns')) pulse_time = start_time + sc.array( dims=['event'], @@ -55,7 +58,7 @@ def test_make_data_with_pulse_time(): def make_data_with_pulse_time_and_proton_charge( rng, n_event, n_proton_charge, bad_charge, bad_charge_indices -): +) -> Tuple[sc.DataArray, sc.DataArray]: data = make_data_with_pulse_time(rng, n_event) start_time = data.bins.coords['pulse_time'].min() @@ -77,33 +80,32 @@ def make_data_with_pulse_time_and_proton_charge( for i in bad_charge_indices: proton_charge[i] = bad_charge - data.attrs['proton_charge'] = sc.scalar(proton_charge) - return data + return data, proton_charge def test_make_data_with_pulse_time_and_proton_charge(): rng = np.random.default_rng(65501) bad_charge = sc.scalar(1.0e5, unit='pC') - data = make_data_with_pulse_time_and_proton_charge( + data, proton_charge = make_data_with_pulse_time_and_proton_charge( rng, 100, 300, bad_charge, [0, 2, 4] ) assert 'pulse_time' in data.bins.coords - assert sc.identical(data.attrs['proton_charge'].value.data[0], bad_charge) - assert sc.identical(data.attrs['proton_charge'].value.data[2], bad_charge) - assert sc.identical(data.attrs['proton_charge'].value.data[4], bad_charge) - assert (data.attrs['proton_charge'].value.data[1] > bad_charge).value - assert (data.attrs['proton_charge'].value.data[3] > bad_charge).value + assert sc.identical(proton_charge.data[0], bad_charge) + assert sc.identical(proton_charge.data[2], bad_charge) + assert sc.identical(proton_charge.data[4], bad_charge) + assert proton_charge.data[1] > bad_charge + assert proton_charge.data[3] > bad_charge def test_remove_bad_pulses_does_not_modify_input(): rng = np.random.default_rng(65501) 
bad_charge = sc.scalar(1.0e5, unit='pC') - data = make_data_with_pulse_time_and_proton_charge( + data, proton_charge = make_data_with_pulse_time_and_proton_charge( rng, 100, 300, bad_charge, bad_charge_indices=[0, 10, 100, 150, 200] ) original = data.copy() _ = filtering.remove_bad_pulses( - data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=0.9 + data, proton_charge=proton_charge, threshold_factor=0.9 ) assert sc.identical(data, original) @@ -111,11 +113,11 @@ def test_remove_bad_pulses_does_not_modify_input(): def test_remove_bad_pulses_without_bad_pulses(): rng = np.random.default_rng(65501) bad_charge = sc.scalar(1.0e5, unit='pC') - data = make_data_with_pulse_time_and_proton_charge( + data, proton_charge = make_data_with_pulse_time_and_proton_charge( rng, 100, 300, bad_charge, bad_charge_indices=[] ) filtered = filtering.remove_bad_pulses( - data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=0.0 + data, proton_charge=proton_charge, threshold_factor=0.0 ) assert sc.identical(filtered, data) @@ -123,11 +125,11 @@ def test_remove_bad_pulses_without_bad_pulses(): def test_remove_bad_pulses_without_good_pulses(): rng = np.random.default_rng(65501) bad_charge = sc.scalar(1.0e5, unit='pC') - data = make_data_with_pulse_time_and_proton_charge( + data, proton_charge = make_data_with_pulse_time_and_proton_charge( rng, 100, 300, bad_charge, bad_charge_indices=np.arange(300) ) filtered = filtering.remove_bad_pulses( - data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=10.0 + data, proton_charge=proton_charge, threshold_factor=10.0 ) empty = data.copy() empty.bins.constituents['begin'][...] = sc.index(0) @@ -139,11 +141,10 @@ def test_remove_bad_pulses_contiguous_section(): rng = np.random.default_rng(65501) bad_charge = sc.scalar(1.0e5, unit='pC') bad_indices = np.arange(100, 120) - data = make_data_with_pulse_time_and_proton_charge( + data, proton_charge = make_data_with_pulse_time_and_proton_charge( rng, 100, 300, bad_charge, bad_indices ) - proton_charge = data.attrs['proton_charge'].value begin_removed = proton_charge.coords['pulse_time'][100] end_removed = proton_charge.coords['pulse_time'][120] data.bins.coords['should_be_removed'] = ( diff --git a/tests/diffraction/powder/corrections_test.py b/tests/diffraction/powder/corrections_test.py index da9c253c..920d28aa 100644 --- a/tests/diffraction/powder/corrections_test.py +++ b/tests/diffraction/powder/corrections_test.py @@ -42,9 +42,9 @@ def test_merge_calibration_add_all_parameters(calibration): ) with_cal = merge_calibration(into=da, calibration=calibration) - assert sc.identical(with_cal.attrs['difa'], calibration['difa'].data) - assert sc.identical(with_cal.attrs['difc'], calibration['difc'].data) - assert sc.identical(with_cal.attrs['tzero'], calibration['tzero'].data) + assert sc.identical(with_cal.coords['difa'], calibration['difa'].data) + assert sc.identical(with_cal.coords['difc'], calibration['difc'].data) + assert sc.identical(with_cal.coords['tzero'], calibration['tzero'].data) assert sc.identical(with_cal.masks['calibration'], calibration['mask'].data) @@ -89,9 +89,9 @@ def test_merge_calibration_raises_if_tzero_exists(calibration): da = sc.DataArray( sc.ones(sizes=calibration.sizes), coords={ - 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None) + 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None), + 'tzero': sc.ones(sizes={'spectrum': calibration.sizes['spectrum']}), }, - attrs={'tzero': sc.ones(sizes={'spectrum': 
calibration.sizes['spectrum']})}, ) with pytest.raises(ValueError): merge_calibration(into=da, calibration=calibration)
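
A minimal sketch of the loading pattern both patches move to, assuming a
scippneutron build that provides load_with_mantid, a working Mantid
installation, and access to the POWGEN sample file via
ess.diffraction.external.powgen; the keys 'data' and 'gd_prtn_chrg' mirror
the notebook cells patched above and are otherwise illustrative.

import scippneutron as scn
from ess.diffraction.external import powgen

# load_with_mantid returns a DataGroup rather than a single DataArray with
# attrs, so run metadata is looked up by key on the group.
dg = scn.load_with_mantid(
    powgen.data.mantid_sample_file(),
    advanced_geometry=True,
    load_pulse_times=False,
    mantid_args={"LoadMonitors": False},
)

events = dg['data']
# Keep the accumulated proton charge as an unaligned coordinate instead of
# an attribute, since scipp's DataArray.attrs is deprecated.
events.coords['gd_prtn_chrg'] = dg['gd_prtn_chrg']
events.coords.set_aligned('gd_prtn_chrg', False)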