From 0922ddfabc2e3225f6b9f060e547c5b7c18424db Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Thu, 23 Jan 2025 13:48:07 +0100 Subject: [PATCH 1/9] fix: move coordinate transformation graph to ess.reflectometry --- src/ess/offspec/__init__.py | 7 ++ src/ess/offspec/conversions.py | 125 +++++++++++++++++++++++++++++++++ src/ess/offspec/data.py | 38 ++++++++++ 3 files changed, 170 insertions(+) create mode 100644 src/ess/offspec/__init__.py create mode 100644 src/ess/offspec/conversions.py create mode 100644 src/ess/offspec/data.py diff --git a/src/ess/offspec/__init__.py b/src/ess/offspec/__init__.py new file mode 100644 index 00000000..886e27ec --- /dev/null +++ b/src/ess/offspec/__init__.py @@ -0,0 +1,7 @@ +from . import data, conversions + + +__all__ = ( + "data", + "conversions", +) diff --git a/src/ess/offspec/conversions.py b/src/ess/offspec/conversions.py new file mode 100644 index 00000000..d637d2a8 --- /dev/null +++ b/src/ess/offspec/conversions.py @@ -0,0 +1,125 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +import scipp as sc +from scippneutron.conversion.graph import beamline, tof + +from ..reflectometry.conversions import reflectometry_q +from ..reflectometry.types import ( + BeamDivergenceLimits, + CoordTransformationGraph, + WavelengthBins, + YIndexLimits, + ZIndexLimits, +) + + +def theta(wavelength, scattered_beam, sample_rotation): + ''' + Angle of reflection. + + Computes the angle between the scattering direction of + the neutron and the sample surface. + + :math:`\\gamma^*` denotes the angle between the scattering direction + and the horizontal plane. + :math:`\\gamma` denotes the angle between the ray from sample position + to detection position + and the horizontal plane. + :math:`L_2` is the length of the ray from sample position to detector position. + :math:`v` is the velocity of the neutron at the sample. + :math:`t` is the travel time from sample to detector. + + The parabolic trajectory of the neutron satisfies + + .. math:: + + \\sin(\\gamma) L_2 = \\sin(\\gamma^*) v t - \\frac{g}{2} t^2 + + and + + .. math:: + + \\cos(\\gamma) L_2 = \\cos(\\gamma^*) vt + + where :math:`g` is the gravitational acceleration. + + The second equation tells us that the approximation :math:`L_2=vt` + will have a small error if :math:`\\gamma` is close to 0 and + the difference between :math:`\\gamma` and :math:`\\gamma^*` is small. + + Using this approximation we can solve the first equation, + and by expressing :math:`v` in terms of the wavelength we get + + .. math:: + + \\sin(\\gamma^*) = + \\sin(\\gamma) + \\frac{g}{2} \\frac{L_2 \\lambda^2 h^2}{m_n^2}. + + Finally, the scattering angle is obtained by subtracting the sample rotation + relative to the horizontal plane. + ''' + c = sc.constants.g * sc.constants.m_n**2 / sc.constants.h**2 + L2 = sc.norm(scattered_beam) + out = (c * L2 * wavelength**2).to( + unit='dimensionless' + ) + scattered_beam.fields.y / L2 + out = sc.asin(out, out=out) + out -= sample_rotation.to(unit='rad') + return out + + +def coordinate_transformation_graph() -> CoordTransformationGraph: + return { + **beamline.beamline(scatter=True), + **tof.elastic_wavelength("tof"), + "theta": theta, + "Q": reflectometry_q, + } + + +def add_coords( + da: sc.DataArray, + graph: dict, +) -> sc.DataArray: + "Adds scattering coordinates to the raw detector data." 
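+    # Let transform_coords walk the graph and attach "wavelength", "theta",
+    # "Q", "L1" and "L2" as new coordinates; dimensions keep their original
+    # names, and intermediate results and aliases are not kept.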
+ return da.transform_coords( + ("wavelength", "theta", "Q", "L1", "L2"), + graph, + rename_dims=False, + keep_intermediate=False, + keep_aliases=False, + ) + + +def _not_between(v, a, b): + return (v < a) | (v > b) + + +def add_masks( + da: sc.DataArray, + ylim: YIndexLimits, + zlims: ZIndexLimits, + bdlim: BeamDivergenceLimits, + wbins: WavelengthBins, +): + """ + Masks the data by ranges in the detector + coordinates ``z`` and ``y``, and by the divergence of the beam, + and by wavelength. + """ + da.masks["stripe_range"] = _not_between(da.coords["stripe"], *ylim) + da.masks['z_range'] = _not_between(da.coords["z_index"], *zlims) + da.bins.masks["divergence_too_large"] = _not_between( + da.bins.coords["angle_of_divergence"], + bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit), + bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit), + ) + da.bins.masks['wavelength'] = _not_between( + da.bins.coords['wavelength'], + wbins[0], + wbins[-1], + ) + return da + + +providers = (coordinate_transformation_graph,) diff --git a/src/ess/offspec/data.py b/src/ess/offspec/data.py new file mode 100644 index 00000000..655ffedf --- /dev/null +++ b/src/ess/offspec/data.py @@ -0,0 +1,38 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) + +from ..reflectometry.types import Filename, ReferenceRun, SampleRun + +_version = "1" + + +def _make_pooch(): + import pooch + + return pooch.create( + path=pooch.os_cache("ess/offspec"), + env="ESS_AMOR_DATA_DIR", + base_url="https://public.esss.dk/groups/scipp/ess/offspec/{version}/", + version=_version, + registry={ + "sample.h5": "md5:02b8703230b6b1e6282c0d39eb94523c", + "direct_beam.h5": "md5:1c4e56afbd35edd96c7607e357981ccf", + }, + ) + + +_pooch = _make_pooch() + + +def offspec_sample_run() -> Filename[SampleRun]: + return Filename[SampleRun](_pooch.fetch("sample.h5")) + + +def offspec_direct_beam_run() -> Filename[ReferenceRun]: + return Filename[ReferenceRun](_pooch.fetch("direct_beam.h5")) + + +__all__ = [ + "offspec_sample_run", + "offspec_direct_beam_run", +] From 0ec1254f0853ff3ed493d1f77f0486dd40d3f480 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 14 Apr 2025 15:14:19 +0200 Subject: [PATCH 2/9] docs: implement offspec and add notebook to docs --- docs/user-guide/index.md | 1 + docs/user-guide/offspec/index.md | 8 + .../offspec/offspec_reduction.ipynb | 447 ++++++++++++++++++ src/ess/offspec/__init__.py | 29 +- src/ess/offspec/conversions.py | 129 +---- src/ess/offspec/corrections.py | 13 + src/ess/offspec/load.py | 32 ++ src/ess/offspec/maskings.py | 36 ++ src/ess/offspec/normalization.py | 49 ++ src/ess/offspec/types.py | 17 + src/ess/offspec/workflow.py | 36 ++ src/ess/reflectometry/normalization.py | 13 +- 12 files changed, 696 insertions(+), 114 deletions(-) create mode 100644 docs/user-guide/offspec/index.md create mode 100644 docs/user-guide/offspec/offspec_reduction.ipynb create mode 100644 src/ess/offspec/corrections.py create mode 100644 src/ess/offspec/load.py create mode 100644 src/ess/offspec/maskings.py create mode 100644 src/ess/offspec/normalization.py create mode 100644 src/ess/offspec/types.py create mode 100644 src/ess/offspec/workflow.py diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md index 1ddd66e2..76ebe8b7 100644 --- a/docs/user-guide/index.md +++ b/docs/user-guide/index.md @@ -6,4 +6,5 @@ maxdepth: 1 --- amor/index estia/index +offspec/index ``` diff --git a/docs/user-guide/offspec/index.md 
b/docs/user-guide/offspec/index.md new file mode 100644 index 00000000..a33301f5 --- /dev/null +++ b/docs/user-guide/offspec/index.md @@ -0,0 +1,8 @@ +# Offspec + +```{toctree} +--- +maxdepth: 1 +--- +offspec-reduction +``` diff --git a/docs/user-guide/offspec/offspec_reduction.ipynb b/docs/user-guide/offspec/offspec_reduction.ipynb new file mode 100644 index 00000000..35981c58 --- /dev/null +++ b/docs/user-guide/offspec/offspec_reduction.ipynb @@ -0,0 +1,447 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Collimated data reduction for OFFSPEC" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "This notebook implements a reduction workflow for reflectometry data collected from the ISIS instrument OFFSPEC using a collimated beam. This workflow implements the same procedure as the corresponding workflow in Mantid, see https://docs.mantidproject.org/nightly/techniques/ISIS_Reflectometry.html." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib widget\n", + "from datetime import datetime\n", + "import platform\n", + "\n", + "import scipp as sc\n", + "from orsopy import fileio\n", + "\n", + "from ess import reflectometry, offspec\n", + "from ess.reflectometry.types import *\n", + "from ess.offspec.types import *" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Loading some data\n", + "\n", + "In this example, we load some test data provided by the `offspec` package. We need a sample measurement (the sample is `Air | Si(790 A) | Cu(300 A) | SiO2`) and a direct beam measurement. The latter was obtained by positioning the detector directly in the beam of incident neutrons and moving the sample out of the way. It gives an estimate for the ISIS pulse structure as a function of time-of-flight." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "wf = offspec.OffspecWorkflow()\n", + "wf[Filename[SampleRun]] = offspec.data.offspec_sample_run()\n", + "wf[Filename[ReferenceRun]] = offspec.data.offspec_direct_beam_run()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "wf.visualize(ReflectivityOverQ, graph_attr={'rankdir': 'LR'})" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "## Populating the ORSO header\n", + "\n", + "We will write the reduced data file following the ORSO `.ort`` standard `__, to enable a metadata rich header. We will create an empty header and then populate this." + ] + }, + { + "cell_type": "markdown", + "id": "7", + "metadata": {}, + "source": [ + "### The data source information" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8", + "metadata": {}, + "outputs": [], + "source": [ + "header = fileio.orso.Orso.empty()\n", + "\n", + "header.data_source.owner = fileio.base.Person(\n", + " name=\"Joshanial F. K. 
Cooper\",\n", + " affiliation=\"ISIS Neutron and Muon Source\",\n", + ")\n", + "header.data_source.experiment = fileio.data_source.Experiment(\n", + " title=\"OFFSPEC Sample Data\",\n", + " instrument=\"OFFSPEC\",\n", + " start_date=\"2020-12-14T10:34:02\",\n", + " probe=\"neutron\",\n", + " facility=\"RAL/ISIS/OFFSPEC\",\n", + ")\n", + "header.data_source.sample = fileio.data_source.Sample(\n", + " name=\"QCS sample\",\n", + " category=\"gas/solid\",\n", + " composition=\"Air | Si(790 A) | Cu(300 A) | SiO2\",\n", + ")\n", + "header.data_source.measurement = fileio.data_source.Measurement(\n", + " instrument_settings=fileio.data_source.InstrumentSettings(\n", + " incident_angle=fileio.base.Value(\n", + " wf.compute(RawDetectorData[SampleRun]).coords[\"theta\"].value,\n", + " wf.compute(RawDetectorData[SampleRun]).coords[\"theta\"].unit\n", + " ),\n", + " wavelength=None,\n", + " polarization=\"unpolarized\",\n", + " ),\n", + " data_files=[\n", + " offspec.data.offspec_sample_run().rsplit(\"/\", 1)[-1],\n", + " offspec.data.offspec_direct_beam_run().rsplit(\"/\", 1)[-1],\n", + " ],\n", + " scheme=\"energy-dispersive\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "### The reduction details\n", + "\n", + "The `reduction` section can start to be populated also. Entries such as `corrections` will be filled up through the reduction process." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "header.reduction.software = fileio.reduction.Software(\n", + " name=\"essreflectometry\", version=reflectometry.__version__, platform=platform.platform()\n", + ")\n", + "header.reduction.timestamp = datetime.now() # noqa: DTZ005\n", + "header.reduction.creator = fileio.base.Person(\n", + " name=\"I. D. Scientist\",\n", + " affiliation=\"European Spallation Source\",\n", + " contact=\"i.d.scientist@ess.eu\",\n", + ")\n", + "header.reduction.corrections = []\n", + "header.reduction.computer = platform.node()\n", + "header.reduction.script = \"offspec_reduction.ipynb\"" + ] + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": {}, + "source": [ + "To ensure that the header object is carried through the process, we assign it to the sample `scipp.DataArray`. The direct beam header object will be overwritten at the normalisation step so we will keep this empty." + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "### Determining the region of interest\n", + "\n", + "To determine what region of the detector contains the specular peak intensity we plot the intensity distribution of the sample measurement over `spectrum` (detector pixel) and `time-of-flight`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "wf.compute(RawDetectorData[SampleRun]).hist(tof=50).plot(norm='log') \\\n", + "+ wf.compute(RawDetectorData[ReferenceRun]).hist(tof=50).plot(norm='log')" + ] + }, + { + "cell_type": "markdown", + "id": "14", + "metadata": {}, + "source": [ + "The region of interest is set in the workflow by setting `SpectrumLimits`. In this case it seems the specular peak is in the region `[389, 414]`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "15", + "metadata": {}, + "outputs": [], + "source": [ + "wf[SpectrumLimits] = (sc.scalar(389, unit=None), sc.scalar(414, unit=None))\n", + "header.reduction.corrections += ['region of interest defined as spectrum 389:415']" + ] + }, + { + "cell_type": "markdown", + "id": "16", + "metadata": {}, + "source": [ + "## Coordinate transform graph\n", + "\n", + "To compute the wavelength $\\lambda$ we can use a coordinate transform graph. The OFFSPEC graph is the standard reflectometry graph, shown below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "sc.show_graph(wf.compute(CoordTransformationGraph[SampleRun]), simplified=True)" + ] + }, + { + "cell_type": "markdown", + "id": "18", + "metadata": {}, + "source": [ + "Since the direct beam measurement is __not__ a reflectometry measurement, we use the `no_scatter_graph` to convert this to wavelength." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "19", + "metadata": {}, + "outputs": [], + "source": [ + "sc.show_graph(wf.compute(CoordTransformationGraph[ReferenceRun]), simplified=True)" + ] + }, + { + "cell_type": "markdown", + "id": "20", + "metadata": {}, + "source": [ + "## Normalization by monitor\n", + "It is necessary to normalize the sample and direct beam measurements by the summed monitor counts, which accounts for different lengths of measurement and long-timescale natural variation in the pulse. This will ensure that the final data has the correct scaling when the reflectivity data is normalized. First, we convert the data to wavelength, using the `no_scatter_graph` used previously for the direct beam.\n", + "\n", + "The most reliable monitor for the OFFSPEC instrument is 'monitor2' in the file, therefore this is used." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [ + "wf.compute(MonitorData[SampleRun]).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "22", + "metadata": {}, + "source": [ + "A background subtraction is then performed on the monitor data, where the background is taken as any counts at wavelengths greater than 15 Å. We also mask all events in the sample- and direct-beam measurements that fall outside of the wavelength range we expect for the instrument." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "wf[BackgroundMinWavelength] = sc.scalar(15, unit='angstrom')\n", + "wf[WavelengthBins] = sc.linspace(dim='wavelength', start=2, stop=14, num=2, unit='angstrom')\n", + "header.reduction.corrections += ['monitor background subtraction, background above 15 Å']" + ] + }, + { + "cell_type": "markdown", + "id": "24", + "metadata": {}, + "source": [ + "## Normalisation of sample by direct beam\n", + "The sample and direct beam measurements (which have been normalised by monitor counts) are then histogrammed in $Q$ to 100 geometrically spaced points. The histogrammed direct beam is then used to normalised the sample.\n", + "\n", + "Importantly, some relevant metadata (including the ORSO header object) is carried over." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "wf[QBins] = sc.geomspace('Q', 0.005, 0.033, 101, unit='1/angstrom')\n", + "header.reduction.corrections += [\"normalisation by direct beam\"]" + ] + }, + { + "cell_type": "markdown", + "id": "26", + "metadata": {}, + "source": [ + "We will assume a 3 % of $Q$ resolution function to be included in our file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27", + "metadata": {}, + "outputs": [], + "source": [ + "wf[QResolution] = 0.03" + ] + }, + { + "cell_type": "markdown", + "id": "28", + "metadata": {}, + "source": [ + "### Conversion to $Q$\n", + "This normalised data can then be used to compute the reflectivity as a function of the scattering vector $Q$.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29", + "metadata": {}, + "outputs": [], + "source": [ + "Roq = wf.compute(ReflectivityOverQ).hist()\n", + "Roq.plot(norm='log')" + ] + }, + { + "cell_type": "markdown", + "id": "30", + "metadata": {}, + "source": [ + "## Saving the scipp-reduced data as .ort\n", + "We constructed the ORSO header through the reduction process. We can now make use of this when we save our .ort file." + ] + }, + { + "cell_type": "markdown", + "id": "31", + "metadata": {}, + "source": [ + "And it is necessary to add the column for our uncertainties, which details the **meaning** of the uncertainty values we have given." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32", + "metadata": {}, + "outputs": [], + "source": [ + "header.columns.append(fileio.base.ErrorColumn(error_of='R', error_type='uncertainty', value_is='sigma'))\n", + "header.columns.append(fileio.base.ErrorColumn(error_of='Q', error_type='resolution', value_is='sigma'))" + ] + }, + { + "cell_type": "markdown", + "id": "33", + "metadata": {}, + "source": [ + "Finally, we can save the file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "34", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "ds = fileio.orso.OrsoDataset(\n", + " header,\n", + " np.array([\n", + " sc.midpoints(Roq.coords['Q']).values,\n", + " Roq.data.values,\n", + " sc.stddevs(Roq.data).values,\n", + " Roq.coords['Q_resolution'].values]\n", + " ).T\n", + ")\n", + "\n", + "fileio.save_orso([ds], 'offspec.ort')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35", + "metadata": {}, + "outputs": [], + "source": [ + "!head -n 50 offspec.ort" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36", + "metadata": {}, + "outputs": [], + "source": [ + "header.columns" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.14" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/ess/offspec/__init__.py b/src/ess/offspec/__init__.py index 886e27ec..74aa47b3 100644 --- a/src/ess/offspec/__init__.py +++ b/src/ess/offspec/__init__.py @@ -1,7 +1,32 @@ -from . 
import data, conversions +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import sciline + +from ..reflectometry import providers as reflectometry_providers +from . import conversions, data, load, maskings, normalization, types, workflow + +providers = ( + *reflectometry_providers, + *load.providers, + *conversions.providers, + *maskings.providers, + *workflow.providers, + *normalization.providers, +) + + +def OffspecWorkflow() -> sciline.Pipeline: + """ + Workflow with default parameters for the Estia instrument. + """ + return sciline.Pipeline(providers=providers) __all__ = ( - "data", "conversions", + "data", + "load", + "maskings", + "types", + "workflow", ) diff --git a/src/ess/offspec/conversions.py b/src/ess/offspec/conversions.py index d637d2a8..d92f9e19 100644 --- a/src/ess/offspec/conversions.py +++ b/src/ess/offspec/conversions.py @@ -1,125 +1,44 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) import scipp as sc +from scipp.spatial import rotations_from_rotvecs from scippneutron.conversion.graph import beamline, tof -from ..reflectometry.conversions import reflectometry_q from ..reflectometry.types import ( - BeamDivergenceLimits, - CoordTransformationGraph, - WavelengthBins, - YIndexLimits, - ZIndexLimits, + ReferenceRun, + SampleRun, ) +from .types import CoordTransformationGraph -def theta(wavelength, scattered_beam, sample_rotation): - ''' - Angle of reflection. - - Computes the angle between the scattering direction of - the neutron and the sample surface. - - :math:`\\gamma^*` denotes the angle between the scattering direction - and the horizontal plane. - :math:`\\gamma` denotes the angle between the ray from sample position - to detection position - and the horizontal plane. - :math:`L_2` is the length of the ray from sample position to detector position. - :math:`v` is the velocity of the neutron at the sample. - :math:`t` is the travel time from sample to detector. - - The parabolic trajectory of the neutron satisfies - - .. math:: - - \\sin(\\gamma) L_2 = \\sin(\\gamma^*) v t - \\frac{g}{2} t^2 - - and - - .. math:: - - \\cos(\\gamma) L_2 = \\cos(\\gamma^*) vt - - where :math:`g` is the gravitational acceleration. - - The second equation tells us that the approximation :math:`L_2=vt` - will have a small error if :math:`\\gamma` is close to 0 and - the difference between :math:`\\gamma` and :math:`\\gamma^*` is small. - - Using this approximation we can solve the first equation, - and by expressing :math:`v` in terms of the wavelength we get - - .. math:: - - \\sin(\\gamma^*) = - \\sin(\\gamma) + \\frac{g}{2} \\frac{L_2 \\lambda^2 h^2}{m_n^2}. - - Finally, the scattering angle is obtained by subtracting the sample rotation - relative to the horizontal plane. 
- ''' - c = sc.constants.g * sc.constants.m_n**2 / sc.constants.h**2 - L2 = sc.norm(scattered_beam) - out = (c * L2 * wavelength**2).to( - unit='dimensionless' - ) + scattered_beam.fields.y / L2 - out = sc.asin(out, out=out) - out -= sample_rotation.to(unit='rad') - return out +def adjust_pixel_positions_for_sample(data: sc.DataArray): + rotation = rotations_from_rotvecs( + rotation_vectors=sc.vector( + value=[-2.0 * data.coords['theta'].value, 0, 0], unit=sc.units.deg + ) + ) + return data.assign_coords( + position=rotation * (data.coords['position'] - data.coords['sample_position']) + ) -def coordinate_transformation_graph() -> CoordTransformationGraph: +def coordinate_transformation_graph_sample() -> CoordTransformationGraph[SampleRun]: return { **beamline.beamline(scatter=True), **tof.elastic_wavelength("tof"), - "theta": theta, - "Q": reflectometry_q, } -def add_coords( - da: sc.DataArray, - graph: dict, -) -> sc.DataArray: - "Adds scattering coordinates to the raw detector data." - return da.transform_coords( - ("wavelength", "theta", "Q", "L1", "L2"), - graph, - rename_dims=False, - keep_intermediate=False, - keep_aliases=False, - ) - - -def _not_between(v, a, b): - return (v < a) | (v > b) - - -def add_masks( - da: sc.DataArray, - ylim: YIndexLimits, - zlims: ZIndexLimits, - bdlim: BeamDivergenceLimits, - wbins: WavelengthBins, +def coordinate_transformation_graph_reference() -> ( + CoordTransformationGraph[ReferenceRun] ): - """ - Masks the data by ranges in the detector - coordinates ``z`` and ``y``, and by the divergence of the beam, - and by wavelength. - """ - da.masks["stripe_range"] = _not_between(da.coords["stripe"], *ylim) - da.masks['z_range'] = _not_between(da.coords["z_index"], *zlims) - da.bins.masks["divergence_too_large"] = _not_between( - da.bins.coords["angle_of_divergence"], - bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit), - bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit), - ) - da.bins.masks['wavelength'] = _not_between( - da.bins.coords['wavelength'], - wbins[0], - wbins[-1], - ) - return da + return { + **beamline.beamline(scatter=False), + **tof.elastic_wavelength("tof"), + } -providers = (coordinate_transformation_graph,) +providers = ( + coordinate_transformation_graph_sample, + coordinate_transformation_graph_reference, +) diff --git a/src/ess/offspec/corrections.py b/src/ess/offspec/corrections.py new file mode 100644 index 00000000..ca10f274 --- /dev/null +++ b/src/ess/offspec/corrections.py @@ -0,0 +1,13 @@ +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import scipp as sc + + +def correct_by_monitor( + da: sc.DataArray, + mon: sc.DataArray, + wlims: tuple[sc.Variable, sc.Variable], + wbmin: sc.Variable, +) -> sc.DataArray: + "Corrects the data by the monitor intensity" + mon = mon - sc.values(mon['wavelength', wbmin:].mean()) + return da / sc.values(mon['wavelength', wlims[0] : wlims[-1]].sum()) diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py new file mode 100644 index 00000000..98b94584 --- /dev/null +++ b/src/ess/offspec/load.py @@ -0,0 +1,32 @@ +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import scipp as sc + +from ..reflectometry.types import Filename, RawDetectorData, ReferenceRun, RunType +from .types import CoordTransformationGraph, MonitorData + + +def load_offspec_events( + filename: Filename[RunType], +) -> RawDetectorData[RunType]: + full = sc.io.load_hdf5(filename) + da = full['data'] + da.coords['theta'] = full.pop('Theta')[-1].data + da = 
da.bins.concat('tof') + return da + + +def load_offspec_monitor( + filename: Filename[RunType], + graph: CoordTransformationGraph[ReferenceRun], +) -> MonitorData[RunType]: + full = sc.io.load_hdf5(filename) + mon = full["monitors"]["monitor2"]["data"].transform_coords( + "wavelength", graph=graph + ) + return mon + + +providers = ( + load_offspec_events, + load_offspec_monitor, +) diff --git a/src/ess/offspec/maskings.py b/src/ess/offspec/maskings.py new file mode 100644 index 00000000..a7b76040 --- /dev/null +++ b/src/ess/offspec/maskings.py @@ -0,0 +1,36 @@ +import scipp as sc + +from ..reflectometry.types import ( + WavelengthBins, +) +from .types import SpectrumLimits + + +def _not_between(v, a, b): + return (v < a) | (v > b) + + +def add_masks( + da: sc.DataArray, + spectrum_limits: SpectrumLimits, + wbins: WavelengthBins, +) -> sc.DataArray: + """ + Masks the data by range in the detector spectrum and by wavelength. + """ + da = da.assign_masks( + not_specularly_reflected_signal=_not_between( + da.coords['spectrum'], *spectrum_limits + ) + ) + da = da.bins.assign_masks( + wavelength=_not_between( + da.bins.coords['wavelength'], + wbins[0], + wbins[-1], + ), + ) + return da + + +providers = () diff --git a/src/ess/offspec/normalization.py b/src/ess/offspec/normalization.py new file mode 100644 index 00000000..a112eb7b --- /dev/null +++ b/src/ess/offspec/normalization.py @@ -0,0 +1,49 @@ +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +from ..reflectometry.conversions import reflectometry_q +from ..reflectometry.types import ( + QResolution, + ReducibleData, + Reference, + ReferenceRun, + Sample, + SampleRun, +) + + +def evaluate_reference( + reference: ReducibleData[ReferenceRun], + sample: ReducibleData[SampleRun], + qresolution: QResolution, +) -> Reference: + """ + Adds a :math:`Q`. The coordinate is computed as if the data came from + the sample measurement, that is, they use the ``sample_rotation`` + of the sample measurement. + """ + ref = reference.copy() + ref.coords.pop("theta") + ref.bins.coords['Q'] = reflectometry_q( + wavelength=ref.bins.coords['wavelength'], theta=sample.coords['theta'] + ) + ref.bins.coords['Q_resolution'] = qresolution * ref.bins.coords['Q'] + return ref.bins.concat() + + +def evaluate_sample( + reference: ReducibleData[ReferenceRun], + sample: ReducibleData[SampleRun], +) -> Sample: + """ + Adds a :math:`Q`. 
+ """ + sample = sample.copy() + sample.bins.coords['Q'] = reflectometry_q( + wavelength=sample.bins.coords['wavelength'], theta=sample.coords['theta'] + ) + return sample.bins.concat() + + +providers = ( + evaluate_reference, + evaluate_sample, +) diff --git a/src/ess/offspec/types.py b/src/ess/offspec/types.py new file mode 100644 index 00000000..a9b925fe --- /dev/null +++ b/src/ess/offspec/types.py @@ -0,0 +1,17 @@ +from typing import NewType + +import sciline +import scipp as sc + +from ..reflectometry.types import RunType + +SpectrumLimits = NewType("SpectrumLimits", tuple[sc.Variable, sc.Variable]) +BackgroundMinWavelength = NewType("BackgroundMinWavelength", sc.Variable) + + +class CoordTransformationGraph(sciline.Scope[RunType, dict], dict): + """Coordinate transformation for the runtype""" + + +class MonitorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """ "Monitor data from the run file, with background subtracted""" diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py new file mode 100644 index 00000000..61c0a9dc --- /dev/null +++ b/src/ess/offspec/workflow.py @@ -0,0 +1,36 @@ +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +from ..reflectometry.types import ( + RawDetectorData, + ReducibleData, + RunType, + WavelengthBins, +) +from .corrections import correct_by_monitor +from .maskings import add_masks +from .types import ( + BackgroundMinWavelength, + CoordTransformationGraph, + MonitorData, + SpectrumLimits, +) + + +def add_coords_masks_and_apply_corrections_direct_beam( + da: RawDetectorData[RunType], + spectrum_limits: SpectrumLimits, + wlims: WavelengthBins, + wbmin: BackgroundMinWavelength, + monitor: MonitorData[RunType], + graph: CoordTransformationGraph[RunType], +) -> ReducibleData[RunType]: + """ + Computes coordinates, masks and corrections that are + the same for the sample measurement and the reference measurement. 
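+
+    Concretely: the wavelength coordinate is computed from the transformation
+    graph, detector spectra outside the spectrum limits and events outside the
+    wavelength range are masked, and the data is normalized by the
+    background-subtracted monitor counts.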
+ """ + da = da.transform_coords(('wavelength',), graph=graph) + da = add_masks(da, spectrum_limits, wlims) + da = correct_by_monitor(da, monitor, wlims, wbmin) + return da + + +providers = (add_coords_masks_and_apply_corrections_direct_beam,) diff --git a/src/ess/reflectometry/normalization.py b/src/ess/reflectometry/normalization.py index b1e249dc..2096b606 100644 --- a/src/ess/reflectometry/normalization.py +++ b/src/ess/reflectometry/normalization.py @@ -62,14 +62,13 @@ def reduce_sample_over_q( s = sample.bins.concat().bin(Q=qbins) h = sc.values(reference.hist(Q=s.coords['Q'])) R = s / h.data - R.coords['Q_resolution'] = sc.sqrt( - ( - (sc.values(reference) * reference.coords['Q_resolution'] ** 2) - .flatten(to='Q') - .hist(Q=s.coords['Q']) + if 'Q_resolution' in reference.coords or 'Q_resolution' in reference.bins.coords: + resolution = reference.coords.get( + 'Q_resolution', reference.bins.coords['Q_resolution'] ) - / h - ).data + R.coords['Q_resolution'] = sc.sqrt( + ((sc.values(reference) * resolution**2).hist(Q=s.coords['Q'])) / h + ).data return R From 7190e619975ed895a740eedd48f0c033fdb70c8b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci-lite[bot]" <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 13:15:35 +0000 Subject: [PATCH 3/9] Apply automatic formatting --- src/ess/offspec/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ess/offspec/data.py b/src/ess/offspec/data.py index 655ffedf..8c0d1fe4 100644 --- a/src/ess/offspec/data.py +++ b/src/ess/offspec/data.py @@ -33,6 +33,6 @@ def offspec_direct_beam_run() -> Filename[ReferenceRun]: __all__ = [ - "offspec_sample_run", "offspec_direct_beam_run", + "offspec_sample_run", ] From c7260201f8c6183a57d41a4cae4517ea6ab9bfd2 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 14 Apr 2025 15:17:37 +0200 Subject: [PATCH 4/9] fix: remove unused --- src/ess/offspec/conversions.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/ess/offspec/conversions.py b/src/ess/offspec/conversions.py index d92f9e19..2d08eeca 100644 --- a/src/ess/offspec/conversions.py +++ b/src/ess/offspec/conversions.py @@ -1,7 +1,5 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) -import scipp as sc -from scipp.spatial import rotations_from_rotvecs from scippneutron.conversion.graph import beamline, tof from ..reflectometry.types import ( @@ -11,17 +9,6 @@ from .types import CoordTransformationGraph -def adjust_pixel_positions_for_sample(data: sc.DataArray): - rotation = rotations_from_rotvecs( - rotation_vectors=sc.vector( - value=[-2.0 * data.coords['theta'].value, 0, 0], unit=sc.units.deg - ) - ) - return data.assign_coords( - position=rotation * (data.coords['position'] - data.coords['sample_position']) - ) - - def coordinate_transformation_graph_sample() -> CoordTransformationGraph[SampleRun]: return { **beamline.beamline(scatter=True), From ea8126b5f7f7d9176085d0aa94320cd0fdb08afa Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 14 Apr 2025 15:37:23 +0200 Subject: [PATCH 5/9] fix --- src/ess/reflectometry/normalization.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/ess/reflectometry/normalization.py b/src/ess/reflectometry/normalization.py index 2096b606..cdd5a396 100644 --- a/src/ess/reflectometry/normalization.py +++ b/src/ess/reflectometry/normalization.py @@ -60,14 +60,26 @@ def reduce_sample_over_q( Returns reflectivity as a function of :math:`Q`. 
""" s = sample.bins.concat().bin(Q=qbins) - h = sc.values(reference.hist(Q=s.coords['Q'])) + h = sc.values( + (reference if reference.bins is None else reference.bins.concat()).hist( + Q=s.coords['Q'] + ) + ) R = s / h.data if 'Q_resolution' in reference.coords or 'Q_resolution' in reference.bins.coords: - resolution = reference.coords.get( - 'Q_resolution', reference.bins.coords['Q_resolution'] + resolution = ( + reference.coords['Q_resolution'] + if 'Q_resolution' in reference.coords + else reference.bins.coords['Q_resolution'] ) + weighted_resolution = sc.values(reference) * resolution**2 R.coords['Q_resolution'] = sc.sqrt( - ((sc.values(reference) * resolution**2).hist(Q=s.coords['Q'])) / h + ( + weighted_resolution + if weighted_resolution.bins is None + else weighted_resolution.bins.concat() + ).hist(Q=s.coords['Q']) + / h ).data return R From 24d829dfa6cb5e0a648b94d5da25b144b9f8a9d5 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 14 Apr 2025 15:38:09 +0200 Subject: [PATCH 6/9] fix: concatenate later --- src/ess/offspec/normalization.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/ess/offspec/normalization.py b/src/ess/offspec/normalization.py index a112eb7b..2bad2bb2 100644 --- a/src/ess/offspec/normalization.py +++ b/src/ess/offspec/normalization.py @@ -16,17 +16,17 @@ def evaluate_reference( qresolution: QResolution, ) -> Reference: """ - Adds a :math:`Q`. The coordinate is computed as if the data came from + Adds a :math:`Q` coordinate computed as if the data came from the sample measurement, that is, they use the ``sample_rotation`` of the sample measurement. """ - ref = reference.copy() + ref = reference.copy(deep=False) ref.coords.pop("theta") ref.bins.coords['Q'] = reflectometry_q( wavelength=ref.bins.coords['wavelength'], theta=sample.coords['theta'] ) ref.bins.coords['Q_resolution'] = qresolution * ref.bins.coords['Q'] - return ref.bins.concat() + return ref def evaluate_sample( @@ -34,13 +34,13 @@ def evaluate_sample( sample: ReducibleData[SampleRun], ) -> Sample: """ - Adds a :math:`Q`. + Adds the :math:`Q` coordinate. 
""" - sample = sample.copy() + sample = sample.copy(deep=False) sample.bins.coords['Q'] = reflectometry_q( wavelength=sample.bins.coords['wavelength'], theta=sample.coords['theta'] ) - return sample.bins.concat() + return sample providers = ( From e5e4274ee0101373081c2c2f5a40e872c1629437 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Thu, 24 Apr 2025 09:13:10 +0200 Subject: [PATCH 7/9] fix: update licences --- src/ess/estia/maskings.py | 2 ++ src/ess/offspec/conversions.py | 2 +- src/ess/offspec/corrections.py | 1 + src/ess/offspec/data.py | 3 +-- src/ess/offspec/load.py | 1 + src/ess/offspec/maskings.py | 2 ++ src/ess/offspec/normalization.py | 1 + src/ess/offspec/types.py | 2 ++ src/ess/offspec/workflow.py | 1 + src/ess/reflectometry/gui.py | 2 ++ 10 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/ess/estia/maskings.py b/src/ess/estia/maskings.py index e1ed09fb..dfdeb40c 100644 --- a/src/ess/estia/maskings.py +++ b/src/ess/estia/maskings.py @@ -1,3 +1,5 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc from ..reflectometry.types import ( diff --git a/src/ess/offspec/conversions.py b/src/ess/offspec/conversions.py index 2d08eeca..7b29ec31 100644 --- a/src/ess/offspec/conversions.py +++ b/src/ess/offspec/conversions.py @@ -1,5 +1,5 @@ # SPDX-License-Identifier: BSD-3-Clause -# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from scippneutron.conversion.graph import beamline, tof from ..reflectometry.types import ( diff --git a/src/ess/offspec/corrections.py b/src/ess/offspec/corrections.py index ca10f274..0d87bedb 100644 --- a/src/ess/offspec/corrections.py +++ b/src/ess/offspec/corrections.py @@ -1,3 +1,4 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc diff --git a/src/ess/offspec/data.py b/src/ess/offspec/data.py index 8c0d1fe4..b90aa368 100644 --- a/src/ess/offspec/data.py +++ b/src/ess/offspec/data.py @@ -1,6 +1,5 @@ # SPDX-License-Identifier: BSD-3-Clause -# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) - +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from ..reflectometry.types import Filename, ReferenceRun, SampleRun _version = "1" diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py index 98b94584..3d4f3ab5 100644 --- a/src/ess/offspec/load.py +++ b/src/ess/offspec/load.py @@ -1,3 +1,4 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc diff --git a/src/ess/offspec/maskings.py b/src/ess/offspec/maskings.py index a7b76040..e498b397 100644 --- a/src/ess/offspec/maskings.py +++ b/src/ess/offspec/maskings.py @@ -1,3 +1,5 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc from ..reflectometry.types import ( diff --git a/src/ess/offspec/normalization.py b/src/ess/offspec/normalization.py index 2bad2bb2..8ab2ca00 100644 --- a/src/ess/offspec/normalization.py +++ b/src/ess/offspec/normalization.py @@ -1,3 +1,4 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from ..reflectometry.conversions import reflectometry_q from ..reflectometry.types import ( diff --git a/src/ess/offspec/types.py b/src/ess/offspec/types.py index a9b925fe..b591015a 100644 --- 
a/src/ess/offspec/types.py +++ b/src/ess/offspec/types.py @@ -1,3 +1,5 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from typing import NewType import sciline diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py index 61c0a9dc..d9c033b4 100644 --- a/src/ess/offspec/workflow.py +++ b/src/ess/offspec/workflow.py @@ -1,3 +1,4 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from ..reflectometry.types import ( RawDetectorData, diff --git a/src/ess/reflectometry/gui.py b/src/ess/reflectometry/gui.py index 87c9b8e1..02fd6d09 100644 --- a/src/ess/reflectometry/gui.py +++ b/src/ess/reflectometry/gui.py @@ -1,3 +1,5 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import glob import os import uuid From 468b780cf9da929d210362e4839d748359348b4f Mon Sep 17 00:00:00 2001 From: jokasimr Date: Thu, 24 Apr 2025 09:13:28 +0200 Subject: [PATCH 8/9] Update docs/user-guide/offspec/index.md Co-authored-by: Jan-Lukas Wynen --- docs/user-guide/offspec/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user-guide/offspec/index.md b/docs/user-guide/offspec/index.md index a33301f5..dc3f3ef5 100644 --- a/docs/user-guide/offspec/index.md +++ b/docs/user-guide/offspec/index.md @@ -4,5 +4,5 @@ --- maxdepth: 1 --- -offspec-reduction +offspec_reduction ``` From 3b664074ade799a72a68be33d886203c3e007d26 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Thu, 24 Apr 2025 11:52:43 +0200 Subject: [PATCH 9/9] fix: move workflow from init + use NeXusMonitorName domain type --- src/ess/offspec/__init__.py | 23 +++-------------------- src/ess/offspec/load.py | 5 +++-- src/ess/offspec/types.py | 5 +++++ src/ess/offspec/workflow.py | 24 ++++++++++++++++++++++-- 4 files changed, 33 insertions(+), 24 deletions(-) diff --git a/src/ess/offspec/__init__.py b/src/ess/offspec/__init__.py index 74aa47b3..e537a7d6 100644 --- a/src/ess/offspec/__init__.py +++ b/src/ess/offspec/__init__.py @@ -1,32 +1,15 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) -import sciline - -from ..reflectometry import providers as reflectometry_providers from . import conversions, data, load, maskings, normalization, types, workflow - -providers = ( - *reflectometry_providers, - *load.providers, - *conversions.providers, - *maskings.providers, - *workflow.providers, - *normalization.providers, -) - - -def OffspecWorkflow() -> sciline.Pipeline: - """ - Workflow with default parameters for the Estia instrument. 
- """ - return sciline.Pipeline(providers=providers) - +from .workflow import OffspecWorkflow __all__ = ( + "OffspecWorkflow", "conversions", "data", "load", "maskings", + "normalization", "types", "workflow", ) diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py index 3d4f3ab5..66c3771f 100644 --- a/src/ess/offspec/load.py +++ b/src/ess/offspec/load.py @@ -3,7 +3,7 @@ import scipp as sc from ..reflectometry.types import Filename, RawDetectorData, ReferenceRun, RunType -from .types import CoordTransformationGraph, MonitorData +from .types import CoordTransformationGraph, MonitorData, NeXusMonitorName def load_offspec_events( @@ -19,9 +19,10 @@ def load_offspec_events( def load_offspec_monitor( filename: Filename[RunType], graph: CoordTransformationGraph[ReferenceRun], + monitor_name: NeXusMonitorName, ) -> MonitorData[RunType]: full = sc.io.load_hdf5(filename) - mon = full["monitors"]["monitor2"]["data"].transform_coords( + mon = full["monitors"][monitor_name]["data"].transform_coords( "wavelength", graph=graph ) return mon diff --git a/src/ess/offspec/types.py b/src/ess/offspec/types.py index b591015a..388de10f 100644 --- a/src/ess/offspec/types.py +++ b/src/ess/offspec/types.py @@ -5,6 +5,8 @@ import sciline import scipp as sc +from ess.reduce.nexus import types as reduce_t + from ..reflectometry.types import RunType SpectrumLimits = NewType("SpectrumLimits", tuple[sc.Variable, sc.Variable]) @@ -17,3 +19,6 @@ class CoordTransformationGraph(sciline.Scope[RunType, dict], dict): class MonitorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """ "Monitor data from the run file, with background subtracted""" + + +NeXusMonitorName = reduce_t.NeXusName diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py index d9c033b4..8963813c 100644 --- a/src/ess/offspec/workflow.py +++ b/src/ess/offspec/workflow.py @@ -1,22 +1,42 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import sciline + +from ..reflectometry import providers as reflectometry_providers from ..reflectometry.types import ( RawDetectorData, ReducibleData, RunType, WavelengthBins, ) +from . import conversions, load, maskings, normalization from .corrections import correct_by_monitor from .maskings import add_masks from .types import ( BackgroundMinWavelength, CoordTransformationGraph, MonitorData, + NeXusMonitorName, SpectrumLimits, ) -def add_coords_masks_and_apply_corrections_direct_beam( +def OffspecWorkflow() -> sciline.Pipeline: + """ + Workflow with default parameters for the Offspec instrument. + """ + ps = ( + *providers, + *reflectometry_providers, + *load.providers, + *conversions.providers, + *maskings.providers, + *normalization.providers, + ) + return sciline.Pipeline(providers=ps, params={NeXusMonitorName: 'monitor2'}) + + +def add_coords_masks_and_apply_corrections( da: RawDetectorData[RunType], spectrum_limits: SpectrumLimits, wlims: WavelengthBins, @@ -34,4 +54,4 @@ def add_coords_masks_and_apply_corrections_direct_beam( return da -providers = (add_coords_masks_and_apply_corrections_direct_beam,) +providers = (add_coords_masks_and_apply_corrections,)
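
Taken together, these patches assemble the OFFSPEC reduction as a Sciline pipeline. As a reference, a minimal end-to-end sketch of how the workflow is driven, mirroring the notebook added in patch 2 and reusing its example parameter values (which are not instrument defaults):

import scipp as sc

from ess import offspec
from ess.offspec.types import BackgroundMinWavelength, SpectrumLimits
from ess.reflectometry.types import *  # Filename, QBins, ReflectivityOverQ, ... as used in the notebook

wf = offspec.OffspecWorkflow()
wf[Filename[SampleRun]] = offspec.data.offspec_sample_run()
wf[Filename[ReferenceRun]] = offspec.data.offspec_direct_beam_run()

# Detector region of interest containing the specular peak, and the usable wavelength band.
wf[SpectrumLimits] = (sc.scalar(389, unit=None), sc.scalar(414, unit=None))
wf[WavelengthBins] = sc.linspace(dim='wavelength', start=2, stop=14, num=2, unit='angstrom')

# Monitor background is estimated from counts above this wavelength.
wf[BackgroundMinWavelength] = sc.scalar(15, unit='angstrom')

# Q binning and the assumed 3% dQ/Q resolution.
wf[QBins] = sc.geomspace('Q', 0.005, 0.033, 101, unit='1/angstrom')
wf[QResolution] = 0.03

reflectivity = wf.compute(ReflectivityOverQ).hist()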