From 968a7d0087dbbce88afa01b9af51d16193bea0c2 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Mon, 20 Nov 2023 15:09:44 +0100 Subject: [PATCH 1/3] Copy from legacy ess --- pyproject.toml | 2 + src/ess/diffraction/__init__.py | 16 +- src/ess/diffraction/corrections.py | 90 ++++++++++ src/ess/diffraction/external/__init__.py | 141 +++++++++++++++ .../diffraction/external/powgen/__init__.py | 17 ++ .../diffraction/external/powgen/beamline.py | 46 +++++ src/ess/diffraction/external/powgen/data.py | 73 ++++++++ .../external/powgen/instrument_view.py | 42 +++++ src/ess/diffraction/filtering.py | 69 ++++++++ src/ess/diffraction/grouping.py | 25 +++ src/ess/diffraction/logging.py | 16 ++ src/ess/diffraction/powder/__init__.py | 9 + src/ess/diffraction/powder/conversions.py | 160 +++++++++++++++++ src/ess/diffraction/powder/corrections.py | 46 +++++ src/ess/diffraction/smoothing.py | 86 ++++++++++ tests/diffraction/__init__.py | 2 + tests/diffraction/filtering_test.py | 162 ++++++++++++++++++ tests/diffraction/load_test.py | 42 +++++ tests/diffraction/powder/__init__.py | 2 + tests/diffraction/powder/conversions_test.py | 131 ++++++++++++++ tests/diffraction/powder/corrections_test.py | 111 ++++++++++++ 21 files changed, 1287 insertions(+), 1 deletion(-) create mode 100644 src/ess/diffraction/corrections.py create mode 100644 src/ess/diffraction/external/__init__.py create mode 100644 src/ess/diffraction/external/powgen/__init__.py create mode 100644 src/ess/diffraction/external/powgen/beamline.py create mode 100644 src/ess/diffraction/external/powgen/data.py create mode 100644 src/ess/diffraction/external/powgen/instrument_view.py create mode 100644 src/ess/diffraction/filtering.py create mode 100644 src/ess/diffraction/grouping.py create mode 100644 src/ess/diffraction/logging.py create mode 100644 src/ess/diffraction/powder/__init__.py create mode 100644 src/ess/diffraction/powder/conversions.py create mode 100644 src/ess/diffraction/powder/corrections.py create 
mode 100644 src/ess/diffraction/smoothing.py create mode 100644 tests/diffraction/__init__.py create mode 100644 tests/diffraction/filtering_test.py create mode 100644 tests/diffraction/load_test.py create mode 100644 tests/diffraction/powder/__init__.py create mode 100644 tests/diffraction/powder/conversions_test.py create mode 100644 tests/diffraction/powder/corrections_test.py diff --git a/pyproject.toml b/pyproject.toml index 7cc1a84b..bcff7d56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,8 @@ addopts = "-ra -v" testpaths = "tests" filterwarnings = [ "error", + 'ignore:\n Sentinel is not a public part of the traitlets API:DeprecationWarning', + 'ignore:sc.DataArray.attrs has been deprecated:scipp.VisibleDeprecationWarning' ] [tool.bandit] diff --git a/src/ess/diffraction/__init__.py b/src/ess/diffraction/__init__.py index 6d115cc9..517b6aec 100644 --- a/src/ess/diffraction/__init__.py +++ b/src/ess/diffraction/__init__.py @@ -1,12 +1,26 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) -# flake8: noqa +""" +Components for diffraction experiments (powder and single crystal). 
+""" + import importlib.metadata +from .corrections import normalize_by_monitor, normalize_by_vanadium +from .grouping import group_by_two_theta +from .smoothing import lowpass + try: __version__ = importlib.metadata.version(__package__ or __name__) except importlib.metadata.PackageNotFoundError: __version__ = "0.0.0" del importlib + +__all__ = [ + 'lowpass', + 'group_by_two_theta', + 'normalize_by_monitor', + 'normalize_by_vanadium', +] diff --git a/src/ess/diffraction/corrections.py b/src/ess/diffraction/corrections.py new file mode 100644 index 00000000..af870fc5 --- /dev/null +++ b/src/ess/diffraction/corrections.py @@ -0,0 +1,90 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +from typing import Any, Dict, Optional + +import scipp as sc +from scippneutron.conversion.graph import beamline, tof + +from .logging import get_logger +from .smoothing import lowpass + + +def normalize_by_monitor( + data: sc.DataArray, + *, + monitor: str, + wavelength_edges: Optional[sc.Variable] = None, + smooth_args: Optional[Dict[str, Any]] = None, +) -> sc.DataArray: + """ + Normalize event data by a monitor. + + The input is converted to wavelength if it does not already contain wavelengths. + + Parameters + ---------- + data: + Input event data. + monitor: + Name of a histogrammed monitor. Must be stored as metadata in `data`. + wavelength_edges: + If given, rebin the monitor with these edges. + smooth_args: + If given, the monitor histogram is smoothed with + :func:`ess.diffraction.lowpass` before dividing into `data`. + `smooth_args` is passed as keyword arguments to + :func:`ess.diffraction.lowpass`. If ``None``, the monitor is not smoothed. + + Returns + ------- + : + `data` normalized by a monitor. 
+ """ + mon = data.meta[monitor].value + if 'wavelength' not in mon.coords: + mon = mon.transform_coords( + 'wavelength', + graph={**beamline.beamline(scatter=False), **tof.elastic("tof")}, + keep_inputs=False, + keep_intermediate=False, + keep_aliases=False, + ) + + if wavelength_edges is not None: + mon = mon.rebin(wavelength=wavelength_edges) + if smooth_args is not None: + get_logger().info( + "Smoothing monitor '%s' for normalization using " + "ess.diffraction.smoothing.lowpass with %s.", + monitor, + smooth_args, + ) + mon = lowpass(mon, dim='wavelength', **smooth_args) + return data.bins / sc.lookup(func=mon, dim='wavelength') + + +def normalize_by_vanadium( + data: sc.DataArray, *, vanadium: sc.DataArray, edges: sc.Variable +) -> sc.DataArray: + """ + Normalize sample data by a vanadium measurement. + + Parameters + ---------- + data: + Sample data. + vanadium: + Vanadium data. + edges: + `vanadium` is histogrammed into these bins before dividing the data by it. + + Returns + ------- + : + `data` normalized by `vanadium`. + """ + norm = sc.lookup(vanadium.hist({edges.dim: edges}), dim=edges.dim) + # Converting to unit 'one' because the division might produce a unit + # with a large scale if the proton charges in data and vanadium were + # measured with different units. + return (data.bins / norm).to(unit='one', copy=False) diff --git a/src/ess/diffraction/external/__init__.py b/src/ess/diffraction/external/__init__.py new file mode 100644 index 00000000..339a5635 --- /dev/null +++ b/src/ess/diffraction/external/__init__.py @@ -0,0 +1,141 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Components for non-ESS diffraction experiments. + +WARNING This package will be removed in the future! +It only serves as helpers to develop workflows until it is determined, +which mechanisms and interfaces will be used at ESS. 
+""" +from pathlib import Path +from typing import Dict, Optional, Union + +import numpy as np +import scipp as sc +import scippneutron as scn + +from ..logging import get_logger + + +def _as_boolean_mask(var: sc.Variable) -> sc.Variable: + if var.dtype in ('float32', 'float64'): + if sc.any(var != var.to(dtype='int64')).value: + raise ValueError( + 'Cannot construct boolean mask, the input mask has fractional values.' + ) + return var.to(dtype=bool) + + +def _parse_calibration_instrument_args( + filename: Union[str, Path], + *, + instrument_filename: Optional[str] = None, + instrument_name: Optional[str] = None, +) -> Dict[str, str]: + if instrument_filename is not None: + if instrument_name is not None: + raise ValueError( + 'Only one argument of `instrument_name` and ' + '`instrument_filename` is allowed, got both.' + ) + instrument_arg = {'InstrumentFilename': instrument_filename} + instrument_message = f'with instrument file {instrument_filename}' + else: + if instrument_name is None: + raise ValueError( + 'Need one argument of `instrument_name` and ' + '`instrument_filename` is allowed, got neither.' + ) + instrument_arg = {'InstrumentName': instrument_name} + instrument_message = f'with instrument {instrument_name}' + + get_logger().info( + 'Loading calibration from file %s %s', filename, instrument_message + ) + return instrument_arg + + +def load_calibration( + filename: Union[str, Path], + *, + instrument_filename: Optional[str] = None, + instrument_name: Optional[str] = None, + mantid_args: Optional[dict] = None, +) -> sc.Dataset: + """ + Load and return calibration data. + + Warning + ------- + This function is designed to work with calibration files used by Mantid, + specifically for POWGEN. It does not represent the interface used at ESS + and will be removed in the future. 
+ + Uses the Mantid algorithm `LoadDiffCal + `_ + and stores the data in a :class:`scipp.Dataset` + + Note that this function requires mantid to be installed and available in + the same Python environment as ess. + + Parameters + ---------- + filename: + The name of the calibration file to load. + instrument_filename: + Instrument definition file. + instrument_name: + Name of the instrument. + mantid_args: + Dictionary with additional arguments for the + `LoadDiffCal` Mantid algorithm. + + Returns + ------- + : + A Dataset containing the calibration data and masking. + """ + + mantid_args = {} if mantid_args is None else mantid_args + mantid_args.update( + _parse_calibration_instrument_args( + filename, + instrument_filename=instrument_filename, + instrument_name=instrument_name, + ) + ) + + with scn.mantid.run_mantid_alg( + 'LoadDiffCal', + Filename=str(filename), + MakeGroupingWorkspace=False, + **mantid_args, + ) as ws: + ds = scn.from_mantid(ws.OutputCalWorkspace) + mask_ws = ws.OutputMaskWorkspace + rows = mask_ws.getNumberHistograms() + mask = sc.array( + dims=['row'], + values=np.fromiter( + (mask_ws.readY(i)[0] for i in range(rows)), count=rows, dtype=np.bool_ + ), + unit=None, + ) + # This is deliberately not stored as a mask since that would make + # subsequent handling, e.g., with groupby, more complicated. The mask + # is conceptually not masking rows in this table, i.e., it is not + # marking invalid rows, but rather describes masking for other data. + ds["mask"] = _as_boolean_mask(mask) + + # The file does not define units + # TODO why those units? Can we verify? 
+ ds["difc"].unit = 'us / angstrom' + ds["difa"].unit = 'us / angstrom**2' + ds["tzero"].unit = 'us' + + ds = ds.rename_dims({'row': 'detector'}) + ds.coords['detector'] = ds['detid'].data + ds.coords['detector'].unit = None + del ds['detid'] + + return ds diff --git a/src/ess/diffraction/external/powgen/__init__.py b/src/ess/diffraction/external/powgen/__init__.py new file mode 100644 index 00000000..cc8291da --- /dev/null +++ b/src/ess/diffraction/external/powgen/__init__.py @@ -0,0 +1,17 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Functions and classes for the POWGEN instrument. + +Note that this module is temporary and will be removed in favor of +the ``dream`` module when that is available. +""" + +from . import beamline, data +from .instrument_view import instrument_view + +__all__ = [ + 'beamline', + 'data', + 'instrument_view', +] diff --git a/src/ess/diffraction/external/powgen/beamline.py b/src/ess/diffraction/external/powgen/beamline.py new file mode 100644 index 00000000..958b5894 --- /dev/null +++ b/src/ess/diffraction/external/powgen/beamline.py @@ -0,0 +1,46 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Beamline parameters and utilities for POWGEN. +""" + +import scipp as sc + + +def map_detector_to_spectrum( + data: sc.DataArray, *, detector_info: sc.DataArray +) -> sc.DataArray: + """ + Transform 'detector' coords to 'spectrum'. + + Parameters + ---------- + data: + Input data whose 'detector' coord is transformed. + detector_info: + Defines mapping from detector numbers to spectra. + + Returns + ------- + : + `data` with 'detector' coord and dim replaced by 'spectrum'. 
+ """ + if not sc.identical( + data.coords['detector'].to( + dtype=detector_info.coords['detector'].dtype, copy=False + ), + detector_info.coords['detector'], + ): + raise sc.CoordError( + "The 'detector' coords of `data` and `detector_info` do not match." + ) + + out = data.copy(deep=False) + del out.coords['detector'] + # Add 1 because spectrum numbers in the data start at 1 but + # detector_info contains spectrum indices which start at 0. + out.coords['spectrum'] = detector_info.coords['spectrum'] + sc.index( + 1, dtype=detector_info.coords['spectrum'].dtype + ) + + return out.rename_dims({'detector': 'spectrum'}) diff --git a/src/ess/diffraction/external/powgen/data.py b/src/ess/diffraction/external/powgen/data.py new file mode 100644 index 00000000..0fdb52b2 --- /dev/null +++ b/src/ess/diffraction/external/powgen/data.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +_version = '1' + +__all__ = ['get_path'] + + +def _make_pooch(): + import pooch + + return pooch.create( + path=pooch.os_cache('ess/powgen'), + env='ESS_DATA_DIR', + base_url='https://public.esss.dk/groups/scipp/ess/powgen/{version}/', + version=_version, + registry={ + # Files loadable with Mantid + 'PG3_4844_event.nxs': 'md5:d5ae38871d0a09a28ae01f85d969de1e', + 'PG3_4866_event.nxs': 'md5:3d543bc6a646e622b3f4542bc3435e7e', + 'PG3_5226_event.nxs': 'md5:58b386ebdfeb728d34fd3ba00a2d4f1e', + 'PG3_FERNS_d4832_2011_08_24.cal': 'md5:c181221ebef9fcf30114954268c7a6b6', + # Zipped Scipp HDF5 files + 'PG3_4844_event.zip': 'md5:a644c74f5e740385469b67431b690a3e', + 'PG3_4866_event.zip': 'md5:5bc49def987f0faeb212a406b92b548e', + 'PG3_FERNS_d4832_2011_08_24.zip': 'md5:0fef4ed5f61465eaaa3f87a18f5bb80d', + }, + ) + + +_pooch = _make_pooch() + + +def get_path(name: str, unzip: bool = False) -> str: + """ + Return the path to a data file bundled with scippneutron. 
+ + This function only works with example data and cannot handle + paths to custom files. + """ + import pooch + + return _pooch.fetch(name, processor=pooch.Unzip() if unzip else None) + + +def mantid_sample_file() -> str: + return get_path('PG3_4844_event.nxs') + + +def mantid_vanadium_file() -> str: + return get_path('PG3_4866_event.nxs') + + +def mantid_empty_instrument_file() -> str: + return get_path('PG3_5226_event.nxs') + + +def mantid_calibration_file() -> str: + return get_path('PG3_FERNS_d4832_2011_08_24.cal') + + +def sample_file() -> str: + (path,) = get_path('PG3_4844_event.zip', unzip=True) + return path + + +def vanadium_file() -> str: + (path,) = get_path('PG3_4866_event.zip', unzip=True) + return path + + +def calibration_file() -> str: + (path,) = get_path('PG3_FERNS_d4832_2011_08_24.zip', unzip=True) + return path diff --git a/src/ess/diffraction/external/powgen/instrument_view.py b/src/ess/diffraction/external/powgen/instrument_view.py new file mode 100644 index 00000000..2755c67a --- /dev/null +++ b/src/ess/diffraction/external/powgen/instrument_view.py @@ -0,0 +1,42 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +from typing import Optional + +import scipp as sc +import scippneutron as scn + + +def instrument_view( + da: sc.DataArray, + positions: str = "position", + pixel_size: Optional[float] = None, + components: Optional[dict] = None, + **kwargs +): + """ + Instrument view for the POWGEN instrument, with adjusted default arguments. 
+ + Parameters + ---------- + positions: + Key for coord/attr holding positions to use for pixels + pixel_size: + Custom pixel size to use for detector pixels + components: + Dictionary containing display names and corresponding + settings (also a Dictionary) for additional components to display + items with known positions to be shown + kwargs: + See :func:`scippneutron.instrument_view` + """ + # TODO: the camera argument does not work with the Plopp instrument view + # if 'camera' not in kwargs: + # kwargs = { + # **kwargs, 'camera': { + # 'position': sc.vector(value=[-3, 3, 3], + # unit=da.coords[positions].unit) + # } + # } + return scn.instrument_view( + da, positions=positions, components=components, pixel_size=pixel_size, **kwargs + ) diff --git a/src/ess/diffraction/filtering.py b/src/ess/diffraction/filtering.py new file mode 100644 index 00000000..e548e4e6 --- /dev/null +++ b/src/ess/diffraction/filtering.py @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +# @author Jan-Lukas Wynen +""" +Prototype for event filtering. + +IMPORTANT Will be moved to a different place and potentially modified. 
+""" +from contextlib import contextmanager +from numbers import Real + +import scipp as sc + + +def _equivalent_bin_indices(a, b) -> bool: + a_begin = a.bins.constituents['begin'].flatten(to='') + a_end = a.bins.constituents['end'].flatten(to='') + b_begin = b.bins.constituents['begin'].flatten(to='') + b_end = b.bins.constituents['end'].flatten(to='') + non_empty = a_begin != a_end + return ( + sc.all((a_begin == b_begin)[non_empty]).value + and sc.all((a_end == b_end)[non_empty]).value + ) + + +@contextmanager +def _temporary_bin_coord(data: sc.DataArray, name: str, coord: sc.Variable) -> None: + if not _equivalent_bin_indices(data, coord): + raise ValueError("data and coord do not have equivalent bin indices") + coord = sc.bins( + data=coord.bins.constituents['data'], + begin=data.bins.coords['pulse_time'].bins.constituents['begin'], + end=data.bins.coords['pulse_time'].bins.constituents['end'], + dim=coord.bins.constituents['dim'], + ) + data.bins.coords[name] = coord + yield + del data.bins.coords[name] + + +# TODO non-monotonic proton charge -> raise? 
+def _with_pulse_time_edges(da: sc.DataArray, dim: str) -> sc.DataArray: + pulse_time = da.coords[dim] + one = sc.scalar(1, dtype='int64', unit=pulse_time.unit) + lo = pulse_time[0] - one + hi = pulse_time[-1] + one + mid = sc.midpoints(pulse_time) + da.coords[dim] = sc.concat([lo, mid, hi], dim) + return da + + +def remove_bad_pulses( + data: sc.DataArray, *, proton_charge: sc.DataArray, threshold_factor: Real +) -> sc.DataArray: + """ + assumes that there are bad pulses + """ + min_charge = proton_charge.data.mean() * threshold_factor + good_pulse = _with_pulse_time_edges(proton_charge >= min_charge, proton_charge.dim) + with _temporary_bin_coord( + data, + 'good_pulse', + sc.lookup(good_pulse, good_pulse.dim)[data.bins.coords[good_pulse.dim]], + ): + filtered = data.group(sc.array(dims=['good_pulse'], values=[True])) + filtered = filtered.squeeze('good_pulse').copy(deep=False) + del filtered.coords['good_pulse'] + return filtered diff --git a/src/ess/diffraction/grouping.py b/src/ess/diffraction/grouping.py new file mode 100644 index 00000000..c8199257 --- /dev/null +++ b/src/ess/diffraction/grouping.py @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import scipp as sc +from scippneutron.conversion.graph import beamline + + +def group_by_two_theta(data: sc.DataArray, *, edges: sc.Variable) -> sc.DataArray: + """ + Group data into two_theta bins. + + Parameters + ---------- + data: + Input data array with events. Must contain a coord or attr called 'two_theta' + or coords or attrs that can be used to compute it. + edges: + Bin edges in two_theta. `data` is grouped into those bins. + + Returns + ------- + : + `data` grouped into two_theta bins. 
+ """ + out = data.transform_coords('two_theta', graph=beamline.beamline(scatter=True)) + return out.bin(two_theta=edges.to(unit=out.coords['two_theta'].unit, copy=False)) diff --git a/src/ess/diffraction/logging.py b/src/ess/diffraction/logging.py new file mode 100644 index 00000000..aed4ffa4 --- /dev/null +++ b/src/ess/diffraction/logging.py @@ -0,0 +1,16 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) + +from typing import Optional +import logging + + +def get_logger() -> logging.Logger: + """Return the logger for ess.diffraction. + + Returns + ------- + : + The requested logger. + """ + return logging.getLogger('scipp.ess.diffraction') diff --git a/src/ess/diffraction/powder/__init__.py b/src/ess/diffraction/powder/__init__.py new file mode 100644 index 00000000..7107a8d6 --- /dev/null +++ b/src/ess/diffraction/powder/__init__.py @@ -0,0 +1,9 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Components for powder diffraction experiments. +""" +from .conversions import to_dspacing_with_calibration +from .corrections import merge_calibration + +__all__ = ['merge_calibration', 'to_dspacing_with_calibration'] diff --git a/src/ess/diffraction/powder/conversions.py b/src/ess/diffraction/powder/conversions.py new file mode 100644 index 00000000..8ec9b6cc --- /dev/null +++ b/src/ess/diffraction/powder/conversions.py @@ -0,0 +1,160 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Coordinate transformations for powder diffraction. +""" + +import uuid +from typing import Optional + +import scipp as sc + +from ..logging import get_logger +from .corrections import merge_calibration + + +def _dspacing_from_diff_calibration_generic_impl(t, t0, a, c): + """ + This function implements the solution to + t = a * d^2 + c * d + t0 + for a != 0. 
+ It uses the following way of expressing the solution with an order of operations + that is optimized for low memory usage. + d = (sqrt([x-t0+t] / x) - 1) * c / (2a) + x = c^2 / (4a) + """ + x = c**2 / (4 * a) + out = (x - t0) + t + out /= x + del x + sc.sqrt(out, out=out) + out -= 1 + out *= c / (2 * a) + return out + + +def _dspacing_from_diff_calibration_a0_impl(t, t0, c): + """ + This function implements the solution to + t = a * d^2 + c * d + t0 + for a == 0. + """ + out = t - t0 + out /= c + return out + + +def dspacing_from_diff_calibration( + tof: sc.Variable, + tzero: sc.Variable, + difa: sc.Variable, + difc: sc.Variable, + _tag_positions_consumed: sc.Variable, +) -> sc.Variable: + r""" + Compute d-spacing from calibration parameters. + + d-spacing is the positive solution of + + .. math:: \mathsf{tof} = \mathsf{DIFA} * d^2 + \mathsf{DIFC} * d + t_0 + + This function can be used with :func:`scipp.transform_coords`. + + See Also + -------- + ess.diffraction.conversions.to_dspacing_with_calibration + """ + if sc.all(difa == sc.scalar(0.0, unit=difa.unit)).value: + return _dspacing_from_diff_calibration_a0_impl(tof, tzero, difc) + return _dspacing_from_diff_calibration_generic_impl(tof, tzero, difa, difc) + + +def _restore_tof_if_in_wavelength(data: sc.DataArray) -> sc.DataArray: + if 'wavelength' not in data.dims: + return data + + get_logger().info( + "Discarding coordinate 'wavelength' in favor of 'tof'." 
+ ) + temp_name = uuid.uuid4().hex + aux = data.transform_coords( + temp_name, + {temp_name: lambda wavelength, tof: tof}, + keep_inputs=False, + quiet=True, + ) + return aux.transform_coords( + 'tof', {'tof': temp_name}, keep_inputs=False, quiet=True + ) + + +def _consume_positions(position, sample_position, source_position): + _ = position + _ = sample_position + _ = source_position + return sc.scalar(0) + + +def to_dspacing_with_calibration( + data: sc.DataArray, *, calibration: Optional[sc.Dataset] = None +) -> sc.DataArray: + """ + Transform coordinates to d-spacing from calibration parameters. + + Computes d-spacing from time-of-flight stored in `data`. + + Attention + --------- + `data` may have a wavelength coordinate and dimension, + but those are discarded. + Only the stored time-of-flight is used, that is, any modifications to + the wavelength coordinate after it was computed from time-of-flight are lost. + + Raises + ------ + KeyError + If `data` does not contain a 'tof' metadata. + + Parameters + ---------- + data: + Input data in tof or wavelength dimension. + Must have a tof coordinate or attribute. + calibration: + Calibration data. If given, use it for the conversion. + Otherwise, the calibration data must be stored in `data`. + + Returns + ------- + : + A DataArray with the same data as the input and a 'dspacing' coordinate. + + See Also + -------- + ess.diffraction.conversions.dspacing_from_diff_calibration + """ + if calibration is not None: + out = merge_calibration(into=data, calibration=calibration) + else: + out = data + + out = _restore_tof_if_in_wavelength(out) + graph = { + 'dspacing': dspacing_from_diff_calibration, + } + + if 'position' in out.coords: + graph['_tag_positions_consumed'] = _consume_positions + else: + out.coords['_tag_positions_consumed'] = sc.scalar(0) + + # TODO: The need for attribute popping is a side-effect from using the deprecated + # scippneutron.load() function. 
Once we switch to using `load_with_mantid`, we + should be able to remove this. + for key in ('difc', 'difa', 'tzero'): + if key not in out.coords: + out.coords[key] = out.attrs.pop(key) + + out = out.transform_coords('dspacing', graph=graph, keep_intermediate=False) + out.coords.pop('_tag_positions_consumed', None) + return out diff --git a/src/ess/diffraction/powder/corrections.py b/src/ess/diffraction/powder/corrections.py new file mode 100644 index 00000000..c9fcffdb --- /dev/null +++ b/src/ess/diffraction/powder/corrections.py @@ -0,0 +1,46 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import scipp as sc + + +def merge_calibration(*, into: sc.DataArray, calibration: sc.Dataset) -> sc.DataArray: + """ + Return a scipp.DataArray containing calibration metadata. + + Parameters + ---------- + into: + Base data and metadata for the returned object. + calibration: + Calibration parameters. + + Returns + ------- + : + Copy of `into` with additional coordinates and masks + from `calibration`. + + See Also + -------- + ess.diffraction.load_calibration + """ + dim = calibration.dim + if not sc.identical(into.coords[dim], calibration.coords[dim]): + raise ValueError( + f'Coordinate {dim} of calibration and target dataset do not agree.' + ) + out = into.copy(deep=False) + for name in ('difa', 'difc', 'tzero'): + if name in out.meta: + raise ValueError( + f"Cannot add calibration parameter '{name}' to data, " + "there already is metadata with the same name." + ) + out.attrs[name] = calibration[name].data + if 'calibration' in out.masks: + raise ValueError( + "Cannot add calibration mask 'calibration' to data, " + "there already is a mask with the same name."
+ ) + out.masks['calibration'] = calibration['mask'].data + return out diff --git a/src/ess/diffraction/smoothing.py b/src/ess/diffraction/smoothing.py new file mode 100644 index 00000000..42f34429 --- /dev/null +++ b/src/ess/diffraction/smoothing.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +""" +Smoothing arrays data. +""" + +from typing import Optional + +import scipp as sc +from scipp.scipy.signal import butter + +from .logging import get_logger + + +def _ensure_no_variances(var: sc.DataArray) -> sc.DataArray: + if var.variances is not None: + get_logger().warning( + 'Tried to smoothen data with uncertainties. ' + 'This is not supported because the results would be highly correlated.\n' + 'Instead, the variances are ignored and the output ' + 'will be returned without any!' + '\n--------------------------------------------------\n' + 'If you know a good solution for handling uncertainties in such a case, ' + 'please contact the scipp developers! (e.g. via https://github.com/scipp)' + '\n--------------------------------------------------\n' + ) + return sc.values(var) + return var + + +def lowpass( + da: sc.DataArray, *, dim: str, N: int, Wn: sc.Variable, coord: Optional[str] = None +) -> sc.DataArray: + """ + Smooth data using a lowpass frequency filter. + + Applies a lowpass Butterworth filter to `da.data` based on the sampling rate + defined by `coord`. + See :py:func:`scipp.signal.butter` for information on filter design. + + Important + --------- + If `coord` is bin-edges, it is first converted to bin-centers using + :func:`scipp.midpoints`. + This is only valid for linearly-spaced edges. + + Parameters + ---------- + da: + Data to smoothen. + dim: + Dimension along which to smooth. + coord: + Name of the coordinate that defines the sampling frequency. + Defaults to `dim`. + N: + Order of the lowpass filter. + Wn: + Critical frequency of the filter. 
+ + Returns + ------- + : + Smoothed `da`. + + See Also + -------- + scipp.signal.butter scipp.signal.sosfiltfilt + + Examples + -------- + + >>> x = sc.linspace(dim='x', start=1.1, stop=4.0, num=1000, unit='m') + >>> y = sc.sin(x * sc.scalar(1.0, unit='rad/m')) + >>> y += sc.sin(x * sc.scalar(400.0, unit='rad/m')) + >>> noisy = sc.DataArray(data=y, coords={'x': x}) + >>> smooth = lowpass(noisy, dim='x', N=4, Wn=20 / x.unit) + """ + da = _ensure_no_variances(da) + coord = dim if coord is None else coord + + if da.coords[coord].sizes[dim] == da.sizes[dim] + 1: + da = da.copy(deep=False) + da.coords[coord] = sc.midpoints(da.coords[coord], dim) + + return butter(da.coords[coord], N=N, Wn=Wn).filtfilt(da, dim) diff --git a/tests/diffraction/__init__.py b/tests/diffraction/__init__.py new file mode 100644 index 00000000..df176980 --- /dev/null +++ b/tests/diffraction/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) diff --git a/tests/diffraction/filtering_test.py b/tests/diffraction/filtering_test.py new file mode 100644 index 00000000..6f13385f --- /dev/null +++ b/tests/diffraction/filtering_test.py @@ -0,0 +1,162 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +# @author Jan-Lukas Wynen +import numpy as np +import scipp as sc + +from ess.diffraction import filtering + + +def make_data_with_pulse_time(rng, n_event): + start_time = sc.scalar(np.datetime64('2022-03-14T14:42:37.12345', 'ns')) + pulse_time = start_time + sc.array( + dims=['event'], + values=rng.integers(0, 10**6, n_event), + unit='ns', + dtype='int64', + ) + events = sc.DataArray( + sc.array( + dims=['event'], + values=rng.uniform(0.0, 2.0, n_event), + unit='counts', + dtype='int64', + ), + coords={ + 'tof': sc.array( + dims=['event'], + values=rng.integers(10, 1000, n_event), + unit='us', + dtype='int64', + ), + 'pulse_time': pulse_time, + 'spectrum': 
sc.array( + dims=['event'], + values=rng.integers(0, 10, n_event), + unit=None, + dtype='int64', + ), + }, + ) + return sc.binning.make_binned( + events, + edges=[ + sc.array(dims=['tof'], values=[10, 500, 1000], unit='us', dtype='int64') + ], + groups=[sc.arange('spectrum', 0, 10, unit=None, dtype='int64')], + ) + + +def test_make_data_with_pulse_time(): + rng = np.random.default_rng(9461) + data = make_data_with_pulse_time(rng, 100) + assert 'pulse_time' in data.bins.coords + + +def make_data_with_pulse_time_and_proton_charge( + rng, n_event, n_proton_charge, bad_charge, bad_charge_indices +): + data = make_data_with_pulse_time(rng, n_event) + + start_time = data.bins.coords['pulse_time'].min() + pulse_time = start_time + sc.linspace( + 'pulse_time', 0, 10**6, n_proton_charge, unit='ns' + ).to(dtype='int64') + good_charge_value = bad_charge.value * 100 + proton_charge = sc.DataArray( + sc.array( + dims=['pulse_time'], + values=rng.uniform( + good_charge_value, good_charge_value * 1.2, n_proton_charge + ), + unit=bad_charge.unit, + ), + coords={'pulse_time': pulse_time}, + ) + + for i in bad_charge_indices: + proton_charge[i] = bad_charge + + data.attrs['proton_charge'] = sc.scalar(proton_charge) + return data + + +def test_make_data_with_pulse_time_and_proton_charge(): + rng = np.random.default_rng(65501) + bad_charge = sc.scalar(1.0e5, unit='pC') + data = make_data_with_pulse_time_and_proton_charge( + rng, 100, 300, bad_charge, [0, 2, 4] + ) + assert 'pulse_time' in data.bins.coords + assert sc.identical(data.attrs['proton_charge'].value.data[0], bad_charge) + assert sc.identical(data.attrs['proton_charge'].value.data[2], bad_charge) + assert sc.identical(data.attrs['proton_charge'].value.data[4], bad_charge) + assert (data.attrs['proton_charge'].value.data[1] > bad_charge).value + assert (data.attrs['proton_charge'].value.data[3] > bad_charge).value + + +def test_remove_bad_pulses_does_not_modify_input(): + rng = np.random.default_rng(65501) + bad_charge = 
sc.scalar(1.0e5, unit='pC') + data = make_data_with_pulse_time_and_proton_charge( + rng, 100, 300, bad_charge, bad_charge_indices=[0, 10, 100, 150, 200] + ) + original = data.copy() + _ = filtering.remove_bad_pulses( + data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=0.9 + ) + assert sc.identical(data, original) + + +def test_remove_bad_pulses_without_bad_pulses(): + rng = np.random.default_rng(65501) + bad_charge = sc.scalar(1.0e5, unit='pC') + data = make_data_with_pulse_time_and_proton_charge( + rng, 100, 300, bad_charge, bad_charge_indices=[] + ) + filtered = filtering.remove_bad_pulses( + data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=0.0 + ) + assert sc.identical(filtered, data) + + +def test_remove_bad_pulses_without_good_pulses(): + rng = np.random.default_rng(65501) + bad_charge = sc.scalar(1.0e5, unit='pC') + data = make_data_with_pulse_time_and_proton_charge( + rng, 100, 300, bad_charge, bad_charge_indices=np.arange(300) + ) + filtered = filtering.remove_bad_pulses( + data, proton_charge=data.attrs['proton_charge'].value, threshold_factor=10.0 + ) + empty = data.copy() + empty.bins.constituents['begin'][...] = sc.index(0) + empty.bins.constituents['end'][...] 
= sc.index(0) + assert sc.identical(filtered, empty) + + +def test_remove_bad_pulses_contiguous_section(): + rng = np.random.default_rng(65501) + bad_charge = sc.scalar(1.0e5, unit='pC') + bad_indices = np.arange(100, 120) + data = make_data_with_pulse_time_and_proton_charge( + rng, 100, 300, bad_charge, bad_indices + ) + + proton_charge = data.attrs['proton_charge'].value + begin_removed = proton_charge.coords['pulse_time'][100] + end_removed = proton_charge.coords['pulse_time'][120] + data.bins.coords['should_be_removed'] = ( + begin_removed < data.bins.coords['pulse_time'] + ) & (data.bins.coords['pulse_time'] < end_removed) + + filtered = filtering.remove_bad_pulses( + data, proton_charge=proton_charge, threshold_factor=0.9 + ) + + assert not sc.any(filtered.bins.coords['should_be_removed']).value + n_events_filtered = filtered.bins.size().sum().data + expected_n_events_filtered = ( + data.bins.size().sum().data - data.bins.coords['should_be_removed'].sum() + ) + assert sc.identical(n_events_filtered, expected_n_events_filtered) diff --git a/tests/diffraction/load_test.py b/tests/diffraction/load_test.py new file mode 100644 index 00000000..0cebd9ab --- /dev/null +++ b/tests/diffraction/load_test.py @@ -0,0 +1,42 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import pytest + +from ess.diffraction.external import load_calibration +from ess.diffraction.external.powgen import data + + +@pytest.mark.skip( + reason='mantid.LoadDiffCal causes SEGFAULT on CI but seems to work fine elsewhere' +) +def test_load_calibration_loads_required_data(): + loaded = load_calibration( + data.calibration_file(), instrument_filename='POWGEN_Definition_2011-02-25.xml' + ) + + assert 'difa' in loaded + assert 'difc' in loaded + assert 'tzero' in loaded + assert 'mask' in loaded + assert 'detector' in loaded.coords + assert loaded.dims == ['detector'] + + +@pytest.mark.skip( + reason='mantid.LoadDiffCal causes SEGFAULT 
on CI but seems to work fine elsewhere' +) +def test_load_calibration_requires_instrument_definition(): + with pytest.raises(ValueError): + load_calibration(data.calibration_file()) + + +@pytest.mark.skip( + reason='mantid.LoadDiffCal causes SEGFAULT on CI but seems to work fine elsewhere' +) +def test_load_calibration_can_only_have_1_instrument_definition(): + with pytest.raises(ValueError): + load_calibration( + data.calibration_file(), + instrument_name='POWGEN', + instrument_filename='POWGEN_Definition_2011-02-25.xml', + ) diff --git a/tests/diffraction/powder/__init__.py b/tests/diffraction/powder/__init__.py new file mode 100644 index 00000000..df176980 --- /dev/null +++ b/tests/diffraction/powder/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) diff --git a/tests/diffraction/powder/conversions_test.py b/tests/diffraction/powder/conversions_test.py new file mode 100644 index 00000000..e5c1ab17 --- /dev/null +++ b/tests/diffraction/powder/conversions_test.py @@ -0,0 +1,131 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import numpy as np +import pytest +import scipp as sc + +from ess.diffraction.powder import to_dspacing_with_calibration + + +@pytest.fixture(params=['random', 'zero']) +def calibration(request): + rng = np.random.default_rng(789236) + n = 30 + ds = sc.Dataset( + data={ + 'difa': sc.array( + dims=['spectrum'], + values=rng.uniform(1.0e2, 1.0e3, n), + unit='us / angstrom**2', + ), + 'difc': sc.array( + dims=['spectrum'], + values=rng.uniform(1.0e3, 1.0e4, n), + unit='us / angstrom', + ), + 'tzero': sc.array( + dims=['spectrum'], values=rng.uniform(-1e2, 1e2, n), unit='us' + ), + 'mask': sc.full(dims=['spectrum'], shape=[n], value=False, unit=None), + }, + coords={'spectrum': sc.arange('spectrum', n, unit=None)}, + ) + if request.param == 'zero': + ds['difa'].data = sc.zeros_like(ds['difa'].data) 
+ return ds + + +def test_dspacing_with_calibration_roundtrip(calibration): + initial_tof = sc.DataArray( + sc.ones(dims=['spectrum', 'tof'], shape=[calibration.sizes['spectrum'], 27]), + coords={ + 'spectrum': calibration.coords['spectrum'], + 'tof': sc.linspace('tof', 1.0, 1000.0, 27, unit='us'), + }, + ) + dspacing = to_dspacing_with_calibration(initial_tof, calibration=calibration) + + d = dspacing.coords['dspacing'] + difa = calibration['difa'].data + difc = calibration['difc'].data + tzero = calibration['tzero'].data + recomputed_tof = difa * d**2 + difc * d + tzero + recomputed_tof = recomputed_tof.rename_dims({'dspacing': 'tof'}) + assert sc.allclose(recomputed_tof, initial_tof.coords['tof']) + + +def test_dspacing_with_calibration_roundtrip_with_wavelength(calibration): + initial_wavelength = sc.DataArray( + sc.ones( + dims=['spectrum', 'wavelength'], shape=[calibration.sizes['spectrum'], 27] + ), + coords={ + 'spectrum': calibration.coords['spectrum'], + 'wavelength': sc.linspace('wavelength', 10.0, 100.0, 27, unit='angstrom'), + 'tof': sc.linspace('wavelength', 1.0, 1000.0, 27, unit='us'), + }, + ) + dspacing = to_dspacing_with_calibration(initial_wavelength, calibration=calibration) + + d = dspacing.coords['dspacing'] + difa = calibration['difa'].data + difc = calibration['difc'].data + tzero = calibration['tzero'].data + recomputed_tof = difa * d**2 + difc * d + tzero + recomputed_tof = recomputed_tof.rename_dims({'dspacing': 'tof'}) + assert sc.allclose( + recomputed_tof, + initial_wavelength.coords['tof'].rename_dims({'wavelength': 'tof'}), + ) + + +def test_dspacing_with_calibration_consumes_positions(calibration): + rng = np.random.default_rng(9274) + n_spectra = calibration.sizes['spectrum'] + tof = sc.DataArray( + sc.ones(dims=['spectrum', 'tof'], shape=[calibration.sizes['spectrum'], 27]), + coords={ + 'spectrum': calibration.coords['spectrum'], + 'tof': sc.linspace('tof', 1.0, 1000.0, 27, unit='us'), + 'position': sc.vectors( + 
dims=['spectrum'], + values=rng.uniform(-2.0, 2.0, (n_spectra, 3)), + unit='m', + ), + 'sample_position': sc.vector(value=[0.1, 0.02, 0.0], unit='m'), + 'source_position': sc.vector(value=[-10.0, -1.0, 0.0], unit='m'), + }, + ) + dspacing = to_dspacing_with_calibration(tof, calibration=calibration) + assert sc.identical(dspacing.coords['position'], tof.coords['position']) + assert sc.identical( + dspacing.coords['sample_position'], tof.coords['sample_position'] + ) + assert sc.identical( + dspacing.coords['source_position'], tof.coords['source_position'] + ) + + +def test_dspacing_with_calibration_does_not_use_positions(calibration): + rng = np.random.default_rng(91032) + n_spectra = calibration.sizes['spectrum'] + tof_no_pos = sc.DataArray( + sc.ones(dims=['spectrum', 'tof'], shape=[n_spectra, 27]), + coords={ + 'spectrum': calibration.coords['spectrum'], + 'tof': sc.linspace('tof', 1.0, 1000.0, 27, unit='us'), + }, + ) + tof_pos = tof_no_pos.copy() + tof_pos.coords['position'] = sc.vectors( + dims=['spectrum'], values=rng.uniform(-2.0, 2.0, (n_spectra, 3)), unit='m' + ) + tof_pos.coords['sample_position'] = sc.vector(value=[0.1, 0.02, 0.0], unit='m') + tof_pos.coords['source_position'] = sc.vector(value=[-10.0, -1.0, 0.0], unit='m') + + dspacing_no_pos = to_dspacing_with_calibration(tof_no_pos, calibration=calibration) + dspacing_pos = to_dspacing_with_calibration(tof_pos, calibration=calibration) + + assert sc.allclose( + dspacing_no_pos.coords['dspacing'], dspacing_pos.coords['dspacing'] + ) diff --git a/tests/diffraction/powder/corrections_test.py b/tests/diffraction/powder/corrections_test.py new file mode 100644 index 00000000..da9c253c --- /dev/null +++ b/tests/diffraction/powder/corrections_test.py @@ -0,0 +1,111 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import numpy as np +import pytest +import scipp as sc + +from ess.diffraction.powder import merge_calibration + + +@pytest.fixture 
+def calibration(): + rng = np.random.default_rng(789236) + n = 30 + ds = sc.Dataset( + data={ + 'difa': sc.array( + dims=['spectrum'], + values=rng.uniform(1.0e2, 1.0e3, n), + unit='us / angstrom**2', + ), + 'difc': sc.array( + dims=['spectrum'], + values=rng.uniform(1.0e3, 1.0e4, n), + unit='us / angstrom', + ), + 'tzero': sc.array( + dims=['spectrum'], values=rng.uniform(-1e2, 1e2, n), unit='us' + ), + 'mask': sc.full(dims=['spectrum'], shape=[n], value=False, unit=None), + }, + coords={'spectrum': sc.arange('spectrum', n, unit=None)}, + ) + return ds + + +def test_merge_calibration_add_all_parameters(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None) + }, + ) + with_cal = merge_calibration(into=da, calibration=calibration) + + assert sc.identical(with_cal.attrs['difa'], calibration['difa'].data) + assert sc.identical(with_cal.attrs['difc'], calibration['difc'].data) + assert sc.identical(with_cal.attrs['tzero'], calibration['tzero'].data) + assert sc.identical(with_cal.masks['calibration'], calibration['mask'].data) + + +def test_merge_calibration_raises_if_spectrum_mismatch(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 'spectrum': sc.zeros( + sizes={'spectrum': calibration.sizes['spectrum']}, unit=None + ) + }, + ) + with pytest.raises(ValueError): + merge_calibration(into=da, calibration=calibration) + + +def test_merge_calibration_raises_if_difa_exists(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None), + 'difa': sc.ones(sizes={'spectrum': calibration.sizes['spectrum']}), + }, + ) + with pytest.raises(ValueError): + merge_calibration(into=da, calibration=calibration) + + +def test_merge_calibration_raises_if_difc_exists(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 
'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None), + 'difc': sc.ones(sizes={'spectrum': calibration.sizes['spectrum']}), + }, + ) + with pytest.raises(ValueError): + merge_calibration(into=da, calibration=calibration) + + +def test_merge_calibration_raises_if_tzero_exists(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None) + }, + attrs={'tzero': sc.ones(sizes={'spectrum': calibration.sizes['spectrum']})}, + ) + with pytest.raises(ValueError): + merge_calibration(into=da, calibration=calibration) + + +def test_merge_calibration_raises_if_mask_exists(calibration): + da = sc.DataArray( + sc.ones(sizes=calibration.sizes), + coords={ + 'spectrum': sc.arange('spectrum', calibration.sizes['spectrum'], unit=None) + }, + masks={ + 'calibration': sc.ones(sizes={'spectrum': calibration.sizes['spectrum']}) + }, + ) + with pytest.raises(ValueError): + merge_calibration(into=da, calibration=calibration) From fd4a2f222499194359db3821dcb2aaf783f13b88 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Mon, 20 Nov 2023 15:40:34 +0100 Subject: [PATCH 2/3] Copy docs from legacy ess --- docs/api-reference/index.md | 2 +- docs/conf.py | 10 +- docs/examples/index.md | 9 + docs/examples/powgen_reduction.ipynb | 697 +++++++++++++++++++++++++++ docs/examples/preprocess_files.ipynb | 282 +++++++++++ docs/index.md | 7 +- requirements/docs.in | 1 + requirements/docs.txt | 8 +- 8 files changed, 1005 insertions(+), 11 deletions(-) create mode 100644 docs/examples/index.md create mode 100644 docs/examples/powgen_reduction.ipynb create mode 100644 docs/examples/preprocess_files.ipynb diff --git a/docs/api-reference/index.md b/docs/api-reference/index.md index d8d95471..06485f67 100644 --- a/docs/api-reference/index.md +++ b/docs/api-reference/index.md @@ -3,7 +3,7 @@ ## Classes ```{eval-rst} -.. currentmodule:: essdiffraction +.. currentmodule:: ess.diffraction .. 
autosummary:: :toctree: ../generated/classes diff --git a/docs/conf.py b/docs/conf.py index 36a9e59b..3e030650 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -107,7 +107,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints'] +exclude_patterns = [ + '_build', + 'Thumbs.db', + '.DS_Store', + '**.ipynb_checkpoints', + 'examples/preprocess_files.ipynb', +] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' @@ -187,7 +193,7 @@ # -- Options for Matplotlib in notebooks ---------------------------------- nbsphinx_execute_arguments = [ - "--Session.metadata=scipp_docs_build=True", + "--Session.metadata=scipp_sphinx_build=True", ] # -- Options for doctest -------------------------------------------------- diff --git a/docs/examples/index.md b/docs/examples/index.md new file mode 100644 index 00000000..34638985 --- /dev/null +++ b/docs/examples/index.md @@ -0,0 +1,9 @@ +# Examples + +```{toctree} +--- +maxdepth: 2 +--- + +powgen_reduction +``` diff --git a/docs/examples/powgen_reduction.ipynb b/docs/examples/powgen_reduction.ipynb new file mode 100644 index 00000000..da166b39 --- /dev/null +++ b/docs/examples/powgen_reduction.ipynb @@ -0,0 +1,697 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "bedd5844-1c5c-4fcf-a311-e0625b5e23b7", + "metadata": {}, + "source": [ + "# Data reduction for POWGEN" + ] + }, + { + "cell_type": "markdown", + "id": "094942ff-1bde-46c2-ae50-d00177eca3ad", + "metadata": {}, + "source": [ + "This notebook shows a basic reduction workflow for powder diffraction for the SNS [POWGEN](https://sns.gov/powgen) instrument.\n", + "It serves mainly to develop and present routines for powder diffraction and will eventually be removed in favor of a workflow for DREAM at ESS.\n", + "\n", + 
"**Note** that we load functions from `external` modules.\n", + "These modules will be removed when their ESS counterparts exist." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b58104c6-a196-4576-b3f0-9fb6fb1216e9", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import scipp as sc\n", + "import scippneutron as scn\n", + "import plopp as pp\n", + "\n", + "import ess\n", + "from ess.diffraction import powder\n", + "from ess import diffraction\n", + "from ess.diffraction.external import powgen" + ] + }, + { + "cell_type": "markdown", + "id": "da4040e8-681e-4eb7-a205-af6a88539ffa", + "metadata": {}, + "source": [ + "## Load data" + ] + }, + { + "cell_type": "markdown", + "id": "8db8e21f-f19c-4498-a4d1-d34142b2ba02", + "metadata": {}, + "source": [ + "Load the sample data.\n", + "\n", + "**Note:** We get the file name from `powgen.data`.\n", + "This module provides access to managed example files.\n", + "In the real world, we would need to find the file name in a different way.\n", + "But here, the data has been converted to a [Scipp HDF5 file](https://scipp.github.io/user-guide/reading-and-writing-files.html#HDF5)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c1d52d95-727c-4d67-b1f9-dc8c4e345d15", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample_full = sc.io.load_hdf5(powgen.data.sample_file())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "723704af-1dc1-4cce-a396-77a754b19d77", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample_full" + ] + }, + { + "cell_type": "markdown", + "id": "bd6c57e1-183e-4f1d-ab3e-1c0cb0fd7e59", + "metadata": {}, + "source": [ + "The loaded data group contains some auxiliary detector info that we need later.\n", + "The events are" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7216e1d6-844c-4622-9d84-91bb300b87a0", + "metadata": {}, + "outputs": [], + "source": [ + "sample = sample_full['data']\n", + "sample" + ] + }, + { + "cell_type": "markdown", + "id": "bab068d8-d02c-4bec-a9a3-2c3b713ab750", + "metadata": {}, + "source": [ + "## Inspect the raw data" + ] + }, + { + "cell_type": "markdown", + "id": "330aaf4b-465f-47e9-bede-45183971f9f6", + "metadata": {}, + "source": [ + "We can plot the data array to get an idea of its contents." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3848a30b-b872-465c-a1ca-2fad3a5110cb", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample.hist(spectrum=500, tof=400).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "59585c24-349f-4c5b-ab35-b17772712688", + "metadata": {}, + "source": [ + "We can see how that data maps onto the detector by using POWGEN's instrument view." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6e5a4d3c-cb93-4875-b2be-9e73df22771d", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "scn.instrument_view(sample.hist())" + ] + }, + { + "cell_type": "markdown", + "id": "07912224-f459-4c42-95e0-58d99291a713", + "metadata": {}, + "source": [ + "## Filter out invalid events" + ] + }, + { + "cell_type": "markdown", + "id": "a5121997-9522-4cd7-bbc4-468b38001aee", + "metadata": {}, + "source": [ + "The file contains events that cannot be associated with a specific pulse.\n", + "We can get a range of valid time-of-flight values from the instrument characterization file associated with the current run.\n", + "There is currently no mechanism in `scippneutron` or `ess` to load such a file as it is not clear if ESS will use this approach.\n", + "The values used below are taken from `PG3_characterization_2011_08_31-HR.txt` which is part of the sample files of Mantid.\n", + "See, e.g., [PowderDiffractionReduction](https://www.mantidproject.org/PowderDiffractionReduction).\n", + "\n", + "We remove all events that have a time-of-flight value outside the valid range:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ced73191-082a-4c7c-aa58-6111cd3cf1fd", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample = sample.bin(tof=sc.array(dims=['tof'], values=[0.0, 16666.67], unit='us'))" + ] + }, + { + "cell_type": "markdown", + "id": "2de9ce54-f972-474a-ae82-c9754b80719f", + "metadata": {}, + "source": [ + "## Normalize by proton charge" + ] + }, + { + "cell_type": "markdown", + "id": "ebfdcccc-7638-4dbb-94f4-cf070d9ed28d", + "metadata": {}, + "source": [ + "Next, we normalize the data by the proton charge." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "85bd7848-a7db-42c6-974a-1eb5bf3e9860", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample /= sample.coords['gd_prtn_chrg']" + ] + }, + { + "cell_type": "markdown", + "id": "b0e9bd7b-897f-41a1-80f7-1a12c8424c82", + "metadata": {}, + "source": [ + "We can check the unit of the event weight to see that the data was indeed divided by a charge." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "063b72ab-7b38-47fa-a180-e4cb659efb1f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample.data.values[0].unit" + ] + }, + { + "cell_type": "markdown", + "id": "52e9d8d7-474c-423b-baf4-e9e135f6e46d", + "metadata": {}, + "source": [ + "## Compute d-spacing" + ] + }, + { + "cell_type": "markdown", + "id": "d9291753-2b26-4d5d-8396-f4d3a2a9abfc", + "metadata": {}, + "source": [ + "Here, we compute d-spacing using calibration parameters provided in an example file.\n", + "First, we load the calibration parameters.\n", + "\n", + "**Note:** ESS instruments will use a different, yet to be determined way of encoding calibration parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1ca5395a-4a43-4eaa-85fa-f43e6ac1a576", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "cal = sc.io.load_hdf5(powgen.data.calibration_file())" + ] + }, + { + "cell_type": "markdown", + "id": "02c9608f-7b1d-412b-a4f0-df064b340916", + "metadata": {}, + "source": [ + "The calibration is loaded with a 'detector' dimension.\n", + "Compute the corresponding spectrum indices using the detector info loaded as part of the sample data." 
 + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d9b964ee-43b3-4bc1-b079-dfede08d3208", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "cal = powgen.beamline.map_detector_to_spectrum(\n", + " cal, detector_info=sample_full['detector_info']\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "59566cfb-e175-44ea-bd09-bd2324514405", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "cal" + ] + }, + { + "cell_type": "markdown", + "id": "63553acb-4f78-4518-b5f8-6dc71dc616a5", + "metadata": {}, + "source": [ + "Now we can compute d-spacing for the sample using the calibration parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69388346-24ab-4035-b18d-b10bf274e4c4", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "sample_dspacing = powder.to_dspacing_with_calibration(sample, calibration=cal)" + ] + }, + { + "cell_type": "markdown", + "id": "bc226f10-71fe-4571-8391-db35028c4c76", + "metadata": { + "tags": [] + }, + "source": [ + "## Vanadium correction" + ] + }, + { + "cell_type": "markdown", + "id": "f670ce66-38cf-44ff-ad7c-d5dd30e3b09e", + "metadata": {}, + "source": [ + "Before we can process the d-spacing distribution further, we need to normalize the data by a vanadium measurement."
 + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b9494f15-1fb2-4236-95a7-800a04896e2f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "vana_full = sc.io.load_hdf5(powgen.data.vanadium_file())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19cd62a0-94ef-4dd4-b00e-7d3d9d6c9d50", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "vana_full" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cb9a0742-9887-4166-a8fe-29b3064c8e20", + "metadata": {}, + "outputs": [], + "source": [ + "vana = vana_full['data']\n", + "vana" + ] + }, + { + "cell_type": "markdown", + "id": "6280fb1c-e3a2-4042-b76d-e08e31187af7", + "metadata": {}, + "source": [ + "Now we process the vanadium data in a similar way as the sample data." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2665c1ec-ae0b-440b-b4ab-60cb03f983a2", + "metadata": {}, + "outputs": [], + "source": [ + "vana /= vana.coords['gd_prtn_chrg']" + ] + }, + { + "cell_type": "markdown", + "id": "97ecaa8f-c905-4d02-aad6-dc7ef7c707a8", + "metadata": {}, + "source": [ + "### Removing the variances of the Vanadium data" + ] + }, + { + "cell_type": "markdown", + "id": "3d41c537-40b1-4ea7-87b1-f0dc3929ef55", + "metadata": {}, + "source": [ + "
\n", + "\n", + "**Warning**\n", + " \n", + "Heybrock et al. (2023) have shown that Scipp's uncertainty propagation is not suited for broadcast operations\n", + "such as normalizing event counts by a scalar value, which is the case when normalizing by Vanadium counts.\n", + "These operations are forbidden in recent versions of Scipp.\n", + "Until an alternative method is found to satisfactorily track the variances in this workflow,\n", + "we remove the variances in the Vanadium data.\n", + "The issue is tracked [here](https://github.com/scipp/ess/issues/171).\n", + "\n", + "
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ee60cf0-f2f5-426b-89e7-aa137a73b8c5", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "vana.bins.constituents['data'].variances = None" + ] + }, + { + "cell_type": "markdown", + "id": "31a1525b-c5a2-4164-862d-5e2152d84c83", + "metadata": {}, + "source": [ + "### Conversion to d-spacing\n", + "\n", + "Now, we compute d-spacing using the same calibration parameters as before." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "03105e85-f66c-45b4-9b50-adb9192763a1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "vana_dspacing = powder.to_dspacing_with_calibration(vana, calibration=cal)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "70fd1e76-a16d-4b6d-94d0-fb1284cb26a4", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "vana_dspacing" + ] + }, + { + "cell_type": "markdown", + "id": "0441b09d-9b7e-45e6-a154-a822a21aebb2", + "metadata": {}, + "source": [ + "## Inspect d-spacing" + ] + }, + { + "cell_type": "markdown", + "id": "dd2ef840-a5aa-45dd-aaea-c426dea00a3d", + "metadata": {}, + "source": [ + "We need to histogram the events in order to normalize our sample data by vanadium.\n", + "For consistency, we use these bin edges for both vanadium and the sample data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba9ab7db-5752-4130-8cfb-ec00c70feed8", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "d = vana_dspacing.coords['dspacing']\n", + "dspacing_edges = sc.linspace('dspacing', d.min().value, d.max().value, 200, unit=d.unit)" + ] + }, + { + "cell_type": "markdown", + "id": "2278a3db-6b79-4421-a1ff-2628226f4166", + "metadata": {}, + "source": [ + "### All spectra combined" + ] + }, + { + "cell_type": "markdown", + "id": "84aac5f6-38bd-4246-974b-2a94c2dd80d0", + "metadata": {}, + "source": [ + "We start simple by combining all spectra using `data.bins.concat('spectrum')`.\n", + "Then, we can normalize the same data by vanadium to get a d-spacing distribution.\n", + "\n", + "**Note that because we removed the variances on the Vanadium data, after the following cell, the standard deviations on the result are underestimated.**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5bd287a8-d26e-4134-b1b5-3ed686cca417", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "all_spectra = diffraction.normalize_by_vanadium(\n", + " sample_dspacing.bins.concat('spectrum'),\n", + " vanadium=vana_dspacing.bins.concat('spectrum'),\n", + " edges=dspacing_edges,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ef778f1-e23a-492f-9a08-9a08e889ad83", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "all_spectra.hist(dspacing=dspacing_edges).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "df5f7a70-ac69-47dc-b0c1-1139728d2878", + "metadata": {}, + "source": [ + "### Group into $2\\theta$ bins" + ] + }, + { + "cell_type": "markdown", + "id": "fb474efb-692f-4f2d-9243-23ff9319dd76", + "metadata": {}, + "source": [ + "For a better resolution, we now group the sample and vanadium data into a number of bins in the scattering angle $2\\theta$ (see 
[here](https://scipp.github.io/scippneutron/user-guide/coordinate-transformations.html))\n", + "and normalize each individually." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a412534e-06e5-4723-9856-37e652dfacf7", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "two_theta = sc.linspace(dim='two_theta', unit='deg', start=25.0, stop=90.0, num=16)\n", + "sample_by_two_theta = diffraction.group_by_two_theta(sample_dspacing, edges=two_theta)\n", + "vana_by_two_theta = diffraction.group_by_two_theta(vana_dspacing, edges=two_theta)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0532df2d-a907-40ed-8271-2dc3a71b04ee", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "normalized = diffraction.normalize_by_vanadium(\n", + " sample_by_two_theta, vanadium=vana_by_two_theta, edges=dspacing_edges\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "84c1b8a6-504f-4627-9463-5ff70a6575fd", + "metadata": {}, + "source": [ + "Histogram the results in order to get a useful binning in the following plots." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "014d6ef4-7905-42bf-8336-df7095de47dc", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "normalized = normalized.hist(dspacing=dspacing_edges)" + ] + }, + { + "cell_type": "markdown", + "id": "460c3a5e-2ab6-4dba-8f78-e4cd126960e8", + "metadata": {}, + "source": [ + "Now we can inspect the d-spacing distribution as a function of $2\\theta$." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e66a78f7-e91c-4f81-a3ad-e77f6a4c05ae", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "normalized.plot()" + ] + }, + { + "cell_type": "markdown", + "id": "336a7157-1832-4616-b97f-1372fdad3c50", + "metadata": {}, + "source": [ + "In order to get 1-dimensional plots, we can select some ranges of scattering angles." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "694c8d66-58ea-4645-adca-b33d95f81d23", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "angle = sc.midpoints(normalized.coords['two_theta']).values\n", + "results = {\n", + " f'{round(angle[group], 3)} rad': normalized['two_theta', group]\n", + " for group in range(2, 6)\n", + "}\n", + "sc.plot(results)" + ] + }, + { + "cell_type": "markdown", + "id": "02bd3242-7e90-4279-8cbf-341f2bd37d0c", + "metadata": {}, + "source": [ + "Or interactively by plotting with a 1d projection." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c41d0965-dfef-40e3-bb9a-5629f51c8b68", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "%matplotlib widget\n", + "pp.superplot(normalized)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/examples/preprocess_files.ipynb b/docs/examples/preprocess_files.ipynb new file mode 100644 index 00000000..bf565434 --- /dev/null +++ b/docs/examples/preprocess_files.ipynb @@ -0,0 +1,282 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ffbf4d7c-6eec-4be3-aa60-9235e226304c", + "metadata": {}, + "source": [ + "# Preprocess POWGEN files\n", + "\n", + "Loads test data files with Mantid and stores them as Scipp HDF5 files to be used in the example workflow." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b58104c6-a196-4576-b3f0-9fb6fb1216e9", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import scipp as sc\n", + "import scippneutron as scn\n", + "import plopp as pp\n", + "\n", + "import ess\n", + "from ess.diffraction.external import load_calibration\n", + "from ess.diffraction import powder\n", + "from ess import diffraction\n", + "from ess.external import powgen" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c88d48df-272a-4f83-8b4a-43450a365b1f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "ess.logging.configure_workflow('powgen_preprocess', filename=None)" + ] + }, + { + "cell_type": "markdown", + "id": "136323fc-b86c-46b3-a4f8-632151d4fb9a", + "metadata": {}, + "source": [ + "## Sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "122e026b-09ce-45d0-bbb6-f839faa534b6", + "metadata": {}, + "outputs": [], + "source": [ + "sample = scn.load(powgen.data.mantid_sample_file(),\n", + " advanced_geometry=True,\n", + " load_pulse_times=False,\n", + " mantid_args={\"LoadMonitors\": False})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d11a14a8-0fb7-4857-a234-e055d61d06ff", + "metadata": {}, + "outputs": [], + "source": [ + "sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab654cbe-44e4-4925-8e51-3695bc6fc193", + "metadata": {}, + "outputs": [], + "source": [ + "sample.coords['gd_prtn_chrg'] = sample.attrs.pop('gd_prtn_chrg')\n", + "sample.coords.set_aligned('gd_prtn_chrg', False)\n", + "sample.attrs.clear()\n", + "sample_dg = sc.DataGroup({\n", + " 'data': sample,\n", + " 'detector_info': sample.coords.pop('detector_info').value,\n", + "})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d878953-7056-48c5-8f2c-870f8f980046", + "metadata": {}, + "outputs": [], + "source": [ + "sample_dg" + ] + }, + { + "cell_type": "code", 
+ "execution_count": null, + "id": "f63e67ec-3199-426b-8e0e-e5ac835f3548", + "metadata": {}, + "outputs": [], + "source": [ + "sample_dg.save_hdf5('PG3_4844_event.h5')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2b3863ed-ee5e-4cc0-a8fb-6ed8cdc66687", + "metadata": {}, + "outputs": [], + "source": [ + "sc.io.load_hdf5('PG3_4844_event.h5')" + ] + }, + { + "cell_type": "markdown", + "id": "6ba80e9c-9e09-4f8d-8c8b-0fab13a65483", + "metadata": {}, + "source": [ + "## Vana" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be8ed197-1b2d-442f-9f0e-ca64aa128c7d", + "metadata": {}, + "outputs": [], + "source": [ + "vana = scn.load(powgen.data.mantid_vanadium_file(),\n", + " advanced_geometry=False,\n", + " load_pulse_times=True,\n", + " mantid_args={\"LoadMonitors\": False})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5dde24e6-57ca-4d23-b7b2-b3937c6f61be", + "metadata": {}, + "outputs": [], + "source": [ + "vana.coords['gd_prtn_chrg'] = vana.attrs.pop('gd_prtn_chrg')\n", + "vana.coords.set_aligned('gd_prtn_chrg', False)\n", + "vana_dg = sc.DataGroup({\n", + " 'data': vana,\n", + " 'proton_charge': vana.attrs.pop('proton_charge').value.rename(time='pulse_time')\n", + "})\n", + "vana.attrs.clear()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4e35b7d7-7f65-4551-bf9a-99c4fd1118e6", + "metadata": {}, + "outputs": [], + "source": [ + "vana_dg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0065ddd4-4d7e-4b06-b133-f56b43b9aa18", + "metadata": {}, + "outputs": [], + "source": [ + "vana_dg['data'].bins.constituents['data']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ec0493a6-1b1f-42da-bc05-f5f511a95866", + "metadata": {}, + "outputs": [], + "source": [ + "vana_dg['data'].bins.constituents['data'].coords['tof']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dd961cbf-ba31-499f-9d8b-f28764f7314f", + 
"metadata": {}, + "outputs": [], + "source": [ + "vana_dg.save_hdf5('PG3_4866_event.h5')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a85a87e6-0cd8-4b9d-9a70-b9046da32973", + "metadata": {}, + "outputs": [], + "source": [ + "sc.io.load_hdf5('PG3_4866_event.h5')" + ] + }, + { + "cell_type": "markdown", + "id": "237910ef-43c0-4de1-b020-780cf99239de", + "metadata": {}, + "source": [ + "## Calibration" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6a667f13-667a-4674-b6ab-b64ebd6b2ee5", + "metadata": {}, + "outputs": [], + "source": [ + "cal = load_calibration(\n", + " powgen.data.mantid_calibration_file(),\n", + " instrument_filename='POWGEN_Definition_2011-02-25.xml',\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8039daf-c52e-4e6e-9fa1-212807de45ec", + "metadata": {}, + "outputs": [], + "source": [ + "cal" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e0a850c2-56ec-4aa9-b8fa-9b59223dfcc3", + "metadata": {}, + "outputs": [], + "source": [ + "cal.save_hdf5('PG3_FERNS_d4832_2011_08_24.h5')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ba4e3e4-da92-40d9-92cb-e38afc8a339c", + "metadata": {}, + "outputs": [], + "source": [ + "sc.io.load_hdf5('PG3_FERNS_d4832_2011_08_24.h5')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.17" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/index.md b/docs/index.md index a37ad469..b793a0bb 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,10 +1,4 @@ -<<<<<<< before updating # **ess**diffraction -||||||| last update -# EssDiffraction -======= -# 
ESSDiffraction ->>>>>>> after updating Diffraction data reduction for the European Spallation Source @@ -16,6 +10,7 @@ hidden: --- +examples/index api-reference/index developer/index about/index diff --git a/requirements/docs.in b/requirements/docs.in index f0e0fb70..18c87739 100644 --- a/requirements/docs.in +++ b/requirements/docs.in @@ -1,5 +1,6 @@ -r base.in ipykernel +ipympl ipython!=8.7.0 # Breaks syntax highlighting in Jupyter code cells. myst-parser nbsphinx diff --git a/requirements/docs.txt b/requirements/docs.txt index 15202ed1..529eb932 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,4 +1,4 @@ -# SHA1:29455914eb25d3d8b9873b30f928efd76745ad2b +# SHA1:1881d7128e55429185bf3016ebfe442b0a6317d9 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -40,6 +40,10 @@ imagesize==1.4.1 # via sphinx ipykernel==6.26.0 # via -r docs.in +ipympl==0.9.3 + # via -r docs.in +ipython-genutils==0.2.0 + # via ipympl jinja2==3.1.2 # via # myst-parser @@ -106,7 +110,7 @@ referencing==0.31.0 # via # jsonschema # jsonschema-specifications -rpds-py==0.13.0 +rpds-py==0.13.1 # via # jsonschema # referencing From 7e6ea7a9cf6e3233ea5d4bcc645bd97f994c0de1 Mon Sep 17 00:00:00 2001 From: Jan-Lukas Wynen Date: Mon, 20 Nov 2023 15:51:51 +0100 Subject: [PATCH 3/3] Apply pre-commit fixes --- src/ess/diffraction/external/powgen/instrument_view.py | 2 +- src/ess/diffraction/logging.py | 1 - src/ess/diffraction/powder/conversions.py | 4 +--- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/ess/diffraction/external/powgen/instrument_view.py b/src/ess/diffraction/external/powgen/instrument_view.py index 2755c67a..bb0180c5 100644 --- a/src/ess/diffraction/external/powgen/instrument_view.py +++ b/src/ess/diffraction/external/powgen/instrument_view.py @@ -11,7 +11,7 @@ def instrument_view( positions: str = "position", pixel_size: Optional[float] = None, components: Optional[dict] = None, - **kwargs + **kwargs, ): """ Instrument view for the 
POWGEN instrument, with adjusted default arguments. diff --git a/src/ess/diffraction/logging.py b/src/ess/diffraction/logging.py index aed4ffa4..ab78fa66 100644 --- a/src/ess/diffraction/logging.py +++ b/src/ess/diffraction/logging.py @@ -1,7 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) -from typing import Optional import logging diff --git a/src/ess/diffraction/powder/conversions.py b/src/ess/diffraction/powder/conversions.py index 8ec9b6cc..c7e0ca0d 100644 --- a/src/ess/diffraction/powder/conversions.py +++ b/src/ess/diffraction/powder/conversions.py @@ -73,9 +73,7 @@ def _restore_tof_if_in_wavelength(data: sc.DataArray) -> sc.DataArray: if 'wavelength' not in data.dims: return data - get_logger().info( - "Discarding coordinate 'wavelength' in favor of 'tof'." - ) + get_logger().info("Discarding coordinate 'wavelength' in favor of 'tof'.") temp_name = uuid.uuid4().hex aux = data.transform_coords( temp_name,