diff --git a/docs/user-guide/estia/estia-mcstas-reduction.ipynb b/docs/user-guide/estia/estia-mcstas-reduction.ipynb index ce65c6cb..83893748 100644 --- a/docs/user-guide/estia/estia-mcstas-reduction.ipynb +++ b/docs/user-guide/estia/estia-mcstas-reduction.ipynb @@ -31,7 +31,7 @@ "\n", "from ess.estia.load import load_mcstas_events\n", "from ess.estia.data import estia_mcstas_example, estia_mcstas_groundtruth\n", - "from ess.estia import EstiaWorkflow\n", + "from ess.estia import EstiaMcStasWorkflow\n", "from ess.reflectometry.types import *\n", "from ess.reflectometry.figures import wavelength_z_figure, wavelength_theta_figure, q_theta_figure" ] @@ -52,7 +52,7 @@ "outputs": [], "source": [ "\n", - "wf = EstiaWorkflow()\n", + "wf = EstiaMcStasWorkflow()\n", "wf.insert(load_mcstas_events)\n", "wf[Filename[ReferenceRun]] = estia_mcstas_example('reference')\n", "\n", @@ -85,9 +85,19 @@ ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "5", "metadata": {}, + "outputs": [], + "source": [ + "wf.visualize(graph_attr={'rankdir':\"LR\"})" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, "source": [ "## Ni/Ti multilayer sample\n", "\n", @@ -98,7 +108,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "7", "metadata": {}, "outputs": [], "source": [ @@ -115,7 +125,7 @@ }, { "cell_type": "markdown", - "id": "7", + "id": "8", "metadata": {}, "source": [ "Below are a number of figures displaying different projections of the measured intensity distribution." 
@@ -124,7 +134,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -138,7 +148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -148,7 +158,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "11", "metadata": {}, "outputs": [], "source": [ @@ -158,7 +168,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "12", "metadata": {}, "outputs": [], "source": [ @@ -167,7 +177,7 @@ }, { "cell_type": "markdown", - "id": "12", + "id": "13", "metadata": {}, "source": [ "## Ni on Silicon" @@ -176,7 +186,7 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -193,7 +203,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "15", "metadata": {}, "outputs": [], "source": [ @@ -207,7 +217,7 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": {}, "outputs": [], "source": [ @@ -217,7 +227,7 @@ { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "17", "metadata": {}, "outputs": [], "source": [ @@ -227,7 +237,7 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -236,7 +246,7 @@ }, { "cell_type": "markdown", - "id": "18", + "id": "19", "metadata": {}, "source": [ "## SiO2 on Silicon" @@ -245,7 +255,7 @@ { "cell_type": "code", "execution_count": null, - "id": "19", + "id": "20", "metadata": {}, "outputs": [], "source": [ @@ -262,7 +272,7 @@ { "cell_type": "code", "execution_count": null, - "id": "20", + "id": "21", "metadata": {}, "outputs": [], "source": [ @@ -276,7 +286,7 @@ { "cell_type": "code", "execution_count": null, - "id": "21", + "id": "22", "metadata": {}, "outputs": [], "source": [ @@ -286,7 +296,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", 
+ "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -296,7 +306,7 @@ { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "24", "metadata": {}, "outputs": [], "source": [ diff --git a/docs/user-guide/offspec/offspec_reduction.ipynb b/docs/user-guide/offspec/offspec_reduction.ipynb index 35981c58..e444e369 100644 --- a/docs/user-guide/offspec/offspec_reduction.ipynb +++ b/docs/user-guide/offspec/offspec_reduction.ipynb @@ -113,8 +113,8 @@ "header.data_source.measurement = fileio.data_source.Measurement(\n", " instrument_settings=fileio.data_source.InstrumentSettings(\n", " incident_angle=fileio.base.Value(\n", - " wf.compute(RawDetectorData[SampleRun]).coords[\"theta\"].value,\n", - " wf.compute(RawDetectorData[SampleRun]).coords[\"theta\"].unit\n", + " wf.compute(DetectorData[SampleRun]).coords[\"theta\"].value,\n", + " wf.compute(DetectorData[SampleRun]).coords[\"theta\"].unit\n", " ),\n", " wavelength=None,\n", " polarization=\"unpolarized\",\n", @@ -183,8 +183,8 @@ "metadata": {}, "outputs": [], "source": [ - "wf.compute(RawDetectorData[SampleRun]).hist(tof=50).plot(norm='log') \\\n", - "+ wf.compute(RawDetectorData[ReferenceRun]).hist(tof=50).plot(norm='log')" + "wf.compute(DetectorData[SampleRun]).hist(tof=50).plot(norm='log') \\\n", + "+ wf.compute(DetectorData[ReferenceRun]).hist(tof=50).plot(norm='log')" ] }, { diff --git a/requirements/base.txt b/requirements/base.txt index e8f9f7ba..76934b25 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -7,7 +7,7 @@ # annotated-types==0.7.0 # via pydantic -click==8.1.8 +click==8.2.0 # via dask cloudpickle==3.1.1 # via dask @@ -17,17 +17,17 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.4.1 +dask==2025.5.0 # via -r base.in dnspython==2.7.0 # via email-validator email-validator==2.2.0 # via scippneutron -essreduce==25.5.0 +essreduce==25.5.1 # via -r base.in -fonttools==4.57.0 +fonttools==4.58.0 # via matplotlib -fsspec==2025.3.2 +fsspec==2025.5.0 
# via dask graphviz==0.20.3 # via -r base.in @@ -55,7 +55,7 @@ mpltoolbox==25.4.0 # via scippneutron networkx==3.4.2 # via cyclebane -numpy==2.2.5 +numpy==2.2.6 # via # contourpy # h5py @@ -77,7 +77,7 @@ partd==1.4.2 # via dask pillow==11.2.1 # via matplotlib -plopp==25.4.1 +plopp==25.5.0 # via # -r base.in # scippneutron @@ -100,7 +100,7 @@ pyyaml==6.0.2 # via # dask # orsopy -sciline==25.4.1 +sciline==25.5.1 # via # -r base.in # essreduce diff --git a/requirements/basetest.txt b/requirements/basetest.txt index 9cba074a..8dfac511 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -9,7 +9,7 @@ certifi==2025.4.26 # via requests charset-normalizer==3.4.2 # via requests -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest idna==3.10 # via requests @@ -21,7 +21,7 @@ packaging==25.0 # pytest platformdirs==4.3.8 # via pooch -pluggy==1.5.0 +pluggy==1.6.0 # via pytest pooch==1.8.2 # via -r basetest.in @@ -31,5 +31,7 @@ requests==2.32.3 # via pooch tomli==2.2.1 # via pytest +typing-extensions==4.13.2 + # via exceptiongroup urllib3==2.4.0 # via requests diff --git a/requirements/ci.txt b/requirements/ci.txt index 9484b0ef..a953f075 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -36,9 +36,9 @@ platformdirs==4.3.8 # via # tox # virtualenv -pluggy==1.5.0 +pluggy==1.6.0 # via tox -pyproject-api==1.9.0 +pyproject-api==1.9.1 # via tox requests==2.32.3 # via -r ci.in @@ -48,7 +48,7 @@ tomli==2.2.1 # via # pyproject-api # tox -tox==4.25.0 +tox==4.26.0 # via -r ci.in typing-extensions==4.13.2 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index 5e1b5861..606bc73e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -28,7 +28,7 @@ cffi==1.17.1 # via argon2-cffi-bindings copier==9.7.1 # via -r dev.in -dunamai==1.24.0 +dunamai==1.24.1 # via copier fqdn==1.5.1 # via jsonschema @@ -57,7 +57,7 @@ jupyter-events==0.12.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.15.0 
+jupyter-server==2.16.0 # via # jupyter-lsp # jupyterlab @@ -81,7 +81,7 @@ pip-tools==7.4.1 # via pip-compile-multi plumbum==1.9.0 # via copier -prometheus-client==0.21.1 +prometheus-client==0.22.0 # via jupyter-server pycparser==2.22 # via cffi @@ -107,7 +107,7 @@ terminado==0.18.1 # jupyter-server-terminals toposort==1.10 # via pip-compile-multi -types-python-dateutil==2.9.0.20241206 +types-python-dateutil==2.9.0.20250516 # via arrow uri-template==1.3.0 # via jsonschema diff --git a/requirements/docs.txt b/requirements/docs.txt index fc05c6ec..5dac925b 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -48,7 +48,7 @@ docutils==0.21.2 # nbsphinx # pydata-sphinx-theme # sphinx -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via ipython executing==2.2.0 # via stack-data @@ -182,11 +182,11 @@ requests==2.32.3 # via # pooch # sphinx -rpds-py==0.24.0 +rpds-py==0.25.0 # via # jsonschema # referencing -snowballstemmer==3.0.0.1 +snowballstemmer==3.0.1 # via sphinx soupsieve==2.7 # via beautifulsoup4 @@ -223,7 +223,7 @@ tinycss2==1.4.0 # via bleach tomli==2.2.1 # via sphinx -tornado==6.4.2 +tornado==6.5 # via # ipykernel # jupyter-client diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 4e99640a..098b1e88 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -14,7 +14,7 @@ certifi==2025.4.26 # via requests charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.0 # via dask cloudpickle==3.1.1 # via dask @@ -24,19 +24,19 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2025.4.1 +dask==2025.5.0 # via -r nightly.in dnspython==2.7.0 # via email-validator email-validator==2.2.0 # via scippneutron -essreduce==25.5.0 +essreduce==25.5.1 # via -r nightly.in -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest -fonttools==4.57.0 +fonttools==4.58.0 # via matplotlib -fsspec==2025.3.2 +fsspec==2025.5.0 # via dask graphviz==0.20.3 # via -r nightly.in @@ -68,7 +68,7 @@ mpltoolbox==25.4.0 # 
via scippneutron networkx==3.4.2 # via cyclebane -numpy==2.2.5 +numpy==2.2.6 # via # contourpy # h5py @@ -98,7 +98,7 @@ plopp @ git+https://github.com/scipp/plopp@main # via # -r nightly.in # scippneutron -pluggy==1.5.0 +pluggy==1.6.0 # via pytest pooch==1.8.2 # via -r nightly.in @@ -158,6 +158,7 @@ toolz==1.0.0 # partd typing-extensions==4.13.2 # via + # exceptiongroup # pydantic # pydantic-core # sciline diff --git a/src/ess/amor/__init__.py b/src/ess/amor/__init__.py index 55475c26..4148b94b 100644 --- a/src/ess/amor/__init__.py +++ b/src/ess/amor/__init__.py @@ -4,6 +4,7 @@ import sciline import scipp as sc +import scippnexus as snx from ..reflectometry import providers as reflectometry_providers from ..reflectometry import supermirror @@ -12,8 +13,8 @@ BeamSize, DetectorSpatialResolution, NeXusDetectorName, + Position, RunType, - SamplePosition, SampleRotationOffset, ) from . import ( @@ -63,8 +64,8 @@ def default_parameters() -> dict: supermirror.Alpha: sc.scalar(0.25 / 0.088, unit=sc.units.angstrom), BeamSize[RunType]: 2.0 * sc.units.mm, DetectorSpatialResolution[RunType]: 0.0025 * sc.units.m, - SamplePosition[RunType]: sc.vector([0, 0, 0], unit="m"), - NeXusDetectorName[RunType]: "detector", + Position[snx.NXsample, RunType]: sc.vector([0, 0, 0], unit="m"), + NeXusDetectorName: "detector", ChopperPhase[RunType]: sc.scalar(7.0, unit="deg"), ChopperFrequency[RunType]: sc.scalar(8.333, unit="Hz"), BeamDivergenceLimits: ( diff --git a/src/ess/amor/load.py b/src/ess/amor/load.py index 20510d9d..8ad1272d 100644 --- a/src/ess/amor/load.py +++ b/src/ess/amor/load.py @@ -1,19 +1,26 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc +import scippnexus as snx + +from ess.reduce.nexus import workflow as nexus_workflow +from ess.reduce.nexus.types import NeXusFileSpec from ..reflectometry.load import load_nx from ..reflectometry.types import ( + Beamline, BeamSize, + DetectorData, 
DetectorRotation, Filename, - LoadedNeXusDetector, + Measurement, + NeXusComponent, NeXusDetectorName, ProtonCurrent, - RawDetectorData, RawSampleRotation, RunType, SampleRotation, + SampleRun, SampleSize, ) from .geometry import pixel_coordinates_in_detector_system @@ -27,13 +34,13 @@ def load_detector( - file_path: Filename[RunType], detector_name: NeXusDetectorName[RunType] -) -> LoadedNeXusDetector[RunType]: + file_path: Filename[RunType], detector_name: NeXusDetectorName +) -> NeXusComponent[snx.NXdetector, RunType]: return next(load_nx(file_path, f"NXentry/NXinstrument/{detector_name}")) def load_events( - detector: LoadedNeXusDetector[RunType], + detector: NeXusComponent[snx.NXdetector, RunType], detector_rotation: DetectorRotation[RunType], sample_rotation: SampleRotation[RunType], chopper_phase: ChopperPhase[RunType], @@ -42,7 +49,7 @@ def load_events( chopper_separation: ChopperSeparation[RunType], sample_size: SampleSize[RunType], beam_size: BeamSize[RunType], -) -> RawDetectorData[RunType]: +) -> DetectorData[RunType]: event_data = detector["data"] if 'event_time_zero' in event_data.coords: event_data.bins.coords['event_time_zero'] = sc.bins_like( @@ -70,7 +77,7 @@ def load_events( data.coords["chopper_distance"] = chopper_distance data.coords["sample_size"] = sample_size data.coords["beam_size"] = beam_size - return RawDetectorData[RunType](data) + return DetectorData[RunType](data) def amor_chopper(f: Filename[RunType]) -> RawChopper[RunType]: @@ -126,6 +133,18 @@ def load_amor_proton_current( return pc +def load_beamline_metadata(filename: Filename[SampleRun]) -> Beamline: + return nexus_workflow.load_beamline_metadata_from_nexus( + NeXusFileSpec[SampleRun](filename) + ) + + +def load_measurement_metadata(filename: Filename[SampleRun]) -> Measurement: + return nexus_workflow.load_measurement_metadata_from_nexus( + NeXusFileSpec[SampleRun](filename) + ) + + providers = ( load_detector, load_events, @@ -137,4 +156,6 @@ def load_amor_proton_current( 
load_amor_detector_rotation, load_amor_proton_current, amor_chopper, + load_beamline_metadata, + load_measurement_metadata, ) diff --git a/src/ess/amor/workflow.py b/src/ess/amor/workflow.py index 7aa17041..828abe4f 100644 --- a/src/ess/amor/workflow.py +++ b/src/ess/amor/workflow.py @@ -8,8 +8,8 @@ from ..reflectometry.types import ( BeamDivergenceLimits, CoordTransformationGraph, + DetectorData, ProtonCurrent, - RawDetectorData, ReducibleData, RunType, WavelengthBins, @@ -20,7 +20,7 @@ def add_coords_masks_and_apply_corrections( - da: RawDetectorData[RunType], + da: DetectorData[RunType], ylim: YIndexLimits, zlims: ZIndexLimits, bdlim: BeamDivergenceLimits, diff --git a/src/ess/estia/__init__.py b/src/ess/estia/__init__.py index 08e330e7..742f60f4 100644 --- a/src/ess/estia/__init__.py +++ b/src/ess/estia/__init__.py @@ -2,82 +2,32 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import importlib.metadata -import sciline -import scipp as sc - -from ..reflectometry import providers as reflectometry_providers from ..reflectometry import supermirror -from ..reflectometry.types import ( - BeamDivergenceLimits, - BeamSize, - DetectorSpatialResolution, - NeXusDetectorName, - RunType, - SamplePosition, - SampleRotationOffset, -) from . import conversions, load, maskings, normalization, orso, resolution, workflow from .types import ( AngularResolution, SampleSizeResolution, WavelengthResolution, ) +from .workflow import EstiaMcStasWorkflow, EstiaWorkflow try: __version__ = importlib.metadata.version(__package__ or __name__) except importlib.metadata.PackageNotFoundError: __version__ = "0.0.0" - -providers = ( - *reflectometry_providers, - *load.providers, - *conversions.providers, - *maskings.providers, - *workflow.providers, - *normalization.providers, - *orso.providers, -) -""" -List of providers for setting up a Sciline pipeline. - -This provides a default Estia workflow including providers for loadings files. 
-""" - - -def default_parameters() -> dict: - return { - supermirror.MValue: sc.scalar(5, unit=sc.units.dimensionless), - supermirror.CriticalEdge: 0.022 * sc.Unit("1/angstrom"), - supermirror.Alpha: sc.scalar(0.25 / 0.088, unit=sc.units.angstrom), - BeamSize[RunType]: 2.0 * sc.units.mm, - DetectorSpatialResolution[RunType]: 0.0025 * sc.units.m, - SamplePosition[RunType]: sc.vector([0, 0, 0], unit="m"), - NeXusDetectorName[RunType]: "detector", - BeamDivergenceLimits: ( - sc.scalar(-0.75, unit='deg'), - sc.scalar(0.75, unit='deg'), - ), - SampleRotationOffset[RunType]: sc.scalar(0.0, unit='deg'), - } - - -def EstiaWorkflow() -> sciline.Pipeline: - """ - Workflow with default parameters for the Estia instrument. - """ - return sciline.Pipeline(providers=providers, params=default_parameters()) - - __all__ = [ "AngularResolution", + "EstiaMcStasWorkflow", "EstiaWorkflow", "SampleSizeResolution", "WavelengthResolution", "conversions", - "default_parameters", "load", - "providers", + "maskings", + "normalization", + "orso", "resolution", "supermirror", + "workflow", ] diff --git a/src/ess/estia/corrections.py b/src/ess/estia/corrections.py index c62d6c8a..db5f60c5 100644 --- a/src/ess/estia/corrections.py +++ b/src/ess/estia/corrections.py @@ -1,7 +1,55 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc +from ..reflectometry.conversions import ( + add_coords, + add_proton_current_coord, + add_proton_current_mask, +) +from ..reflectometry.corrections import correct_by_proton_current +from ..reflectometry.types import ( + BeamDivergenceLimits, + CoordTransformationGraph, + DetectorData, + ProtonCurrent, + ReducibleData, + RunType, + WavelengthBins, + YIndexLimits, + ZIndexLimits, +) +from .maskings import add_masks + + +def add_coords_masks_and_apply_corrections( + da: DetectorData[RunType], + ylim: YIndexLimits, + zlims: ZIndexLimits, + bdlim: BeamDivergenceLimits, + wbins: WavelengthBins, + 
proton_current: ProtonCurrent[RunType], + graph: CoordTransformationGraph, +) -> ReducibleData[RunType]: + """ + Computes coordinates, masks and corrections that are + the same for the sample measurement and the reference measurement. + """ + da = add_coords(da, graph) + da = add_masks(da, ylim, zlims, bdlim, wbins) + da = correct_by_footprint(da) + + if len(proton_current) != 0: + da = add_proton_current_coord(da, proton_current) + da = add_proton_current_mask(da) + da = correct_by_proton_current(da) + + return ReducibleData[RunType](da) + def correct_by_footprint(da: sc.DataArray) -> sc.DataArray: - "Corrects the data by the size of the footprint on the sample." + """Corrects the data by the size of the footprint on the sample.""" return da / sc.sin(da.coords['theta']) + + +providers = (add_coords_masks_and_apply_corrections,) diff --git a/src/ess/estia/load.py b/src/ess/estia/load.py index 49d00433..27f89964 100644 --- a/src/ess/estia/load.py +++ b/src/ess/estia/load.py @@ -1,10 +1,11 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import h5py import scipp as sc from ..reflectometry.types import ( + DetectorData, Filename, - RawDetectorData, RunType, SampleRotationOffset, ) @@ -14,7 +15,7 @@ def load_mcstas_events( filename: Filename[RunType], sample_rotation_offset: SampleRotationOffset[RunType], -) -> RawDetectorData[RunType]: +) -> DetectorData[RunType]: """ Load event data from a McStas run and reshape it to look like what we would expect if @@ -84,7 +85,7 @@ def load_mcstas_events( da = da.fold('x', sizes={'blade': 14, 'wire': 32}) da.bins.coords.pop('L') da.bins.coords.pop('t') - return RawDetectorData[RunType](da) + return DetectorData[RunType](da) providers = () diff --git a/src/ess/estia/workflow.py b/src/ess/estia/workflow.py index 5ff95cc8..c521589a 100644 --- a/src/ess/estia/workflow.py +++ b/src/ess/estia/workflow.py @@ -1,48 +1,84 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright 
(c) 2025 Scipp contributors (https://github.com/scipp) -from ..reflectometry.conversions import ( - add_coords, - add_proton_current_coord, - add_proton_current_mask, -) -from ..reflectometry.corrections import correct_by_proton_current + +import sciline +import scipp as sc + +from ess.reduce import nexus + +from ..reflectometry import providers as reflectometry_providers +from ..reflectometry import supermirror from ..reflectometry.types import ( BeamDivergenceLimits, - CoordTransformationGraph, - ProtonCurrent, - RawDetectorData, - ReducibleData, + DetectorSpatialResolution, + NeXusDetectorName, + ReferenceRun, RunType, - WavelengthBins, - YIndexLimits, - ZIndexLimits, + SampleRotationOffset, + SampleRun, +) +from . import conversions, corrections, load, maskings, normalization, orso + +_general_providers = ( + *reflectometry_providers, + *conversions.providers, + *corrections.providers, + *maskings.providers, + *normalization.providers, + *orso.providers, +) + +mcstas_providers = ( + *_general_providers, + *load.providers, ) -from .corrections import correct_by_footprint -from .maskings import add_masks - - -def add_coords_masks_and_apply_corrections( - da: RawDetectorData[RunType], - ylim: YIndexLimits, - zlims: ZIndexLimits, - bdlim: BeamDivergenceLimits, - wbins: WavelengthBins, - proton_current: ProtonCurrent[RunType], - graph: CoordTransformationGraph, -) -> ReducibleData[RunType]: - """ - Computes coordinates, masks and corrections that are - the same for the sample measurement and the reference measurement. - """ - da = add_coords(da, graph) - da = add_masks(da, ylim, zlims, bdlim, wbins) - da = correct_by_footprint(da) - - if len(proton_current) != 0: - da = add_proton_current_coord(da, proton_current) - da = add_proton_current_mask(da) - da = correct_by_proton_current(da) - - return ReducibleData[RunType](da) - - -providers = (add_coords_masks_and_apply_corrections,) +"""List of providers for setting up a Sciline pipeline for McStas data. 
+ +This provides a default Estia workflow including providers for loading files. +""" + +providers = (*_general_providers,) +"""List of providers for setting up a Sciline pipeline. + +This provides a default Estia workflow including providers for loading files. +""" + + +def mcstas_default_parameters() -> dict: +    return { +        supermirror.MValue: sc.scalar(5, unit=sc.units.dimensionless), +        supermirror.CriticalEdge: 0.022 * sc.Unit("1/angstrom"), +        supermirror.Alpha: sc.scalar(0.25 / 0.088, unit=sc.units.angstrom), +        DetectorSpatialResolution[RunType]: 0.0025 * sc.units.m, +        NeXusDetectorName: "detector", +        BeamDivergenceLimits: ( +            sc.scalar(-0.75, unit='deg'), +            sc.scalar(0.75, unit='deg'), +        ), +        SampleRotationOffset[RunType]: sc.scalar(0.0, unit='deg'), +    } + + +def default_parameters() -> dict: +    return { +        NeXusDetectorName: "multiblade_detector", +    } + + +def EstiaMcStasWorkflow() -> sciline.Pipeline: +    """Workflow for reduction of McStas data for the Estia instrument.""" +    return sciline.Pipeline( +        providers=mcstas_providers, params=mcstas_default_parameters() +    ) + + +def EstiaWorkflow() -> sciline.Pipeline: +    """Workflow for reduction of data for the Estia instrument.""" +    workflow = nexus.GenericNeXusWorkflow( +        run_types=[SampleRun, ReferenceRun], monitor_types=[] +    ) +    for provider in providers: +        workflow.insert(provider) +    for name, param in default_parameters().items(): +        workflow[name] = param +    return workflow diff --git a/src/ess/offspec/load.py b/src/ess/offspec/load.py index 66c3771f..9c199f88 100644 --- a/src/ess/offspec/load.py +++ b/src/ess/offspec/load.py @@ -2,13 +2,13 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import scipp as sc -from ..reflectometry.types import Filename, RawDetectorData, ReferenceRun, RunType +from ..reflectometry.types import DetectorData, Filename, ReferenceRun, RunType from .types import CoordTransformationGraph, MonitorData, NeXusMonitorName def load_offspec_events( filename:
Filename[RunType], -) -> RawDetectorData[RunType]: +) -> DetectorData[RunType]: full = sc.io.load_hdf5(filename) da = full['data'] da.coords['theta'] = full.pop('Theta')[-1].data diff --git a/src/ess/offspec/workflow.py b/src/ess/offspec/workflow.py index 8963813c..161d74c1 100644 --- a/src/ess/offspec/workflow.py +++ b/src/ess/offspec/workflow.py @@ -4,7 +4,7 @@ from ..reflectometry import providers as reflectometry_providers from ..reflectometry.types import ( - RawDetectorData, + DetectorData, ReducibleData, RunType, WavelengthBins, @@ -37,7 +37,7 @@ def OffspecWorkflow() -> sciline.Pipeline: def add_coords_masks_and_apply_corrections( - da: RawDetectorData[RunType], + da: DetectorData[RunType], spectrum_limits: SpectrumLimits, wlims: WavelengthBins, wbmin: BackgroundMinWavelength, diff --git a/src/ess/reflectometry/orso.py b/src/ess/reflectometry/orso.py index 268866e0..25b6e5db 100644 --- a/src/ess/reflectometry/orso.py +++ b/src/ess/reflectometry/orso.py @@ -13,14 +13,15 @@ import numpy as np import scipp as sc -from dateutil.parser import parse as parse_datetime from orsopy.fileio import base as orso_base from orsopy.fileio import data_source, orso, reduction from orsopy.fileio.orso import Column, Orso, OrsoDataset from .load import load_nx from .types import ( + Beamline, Filename, + Measurement, ReducibleData, ReferenceRun, ReflectivityOverQ, @@ -60,21 +61,18 @@ OrsoCorrectionList = NewType("OrsoCorrectionList", list[str]) -def parse_orso_experiment(filename: Filename[SampleRun]) -> OrsoExperiment: +def parse_orso_experiment( + beamline: Beamline, measurement: Measurement +) -> OrsoExperiment: """Parse ORSO experiment metadata from raw NeXus data.""" - title, instrument_name, facility, start_time = load_nx( - filename, - "NXentry/title", - "NXentry/NXinstrument/name", - "NXentry/facility", - "NXentry/start_time", - ) return OrsoExperiment( data_source.Experiment( - title=title, - instrument=instrument_name, - facility=facility, - 
start_date=parse_datetime(start_time), + instrument=beamline.name, + facility=beamline.facility, + title=measurement.title, + start_date=measurement.start_time, + proposalID=measurement.experiment_id, + doi=measurement.experiment_doi, probe="neutron", ) ) @@ -101,7 +99,7 @@ def parse_orso_sample(filename: Filename[SampleRun]) -> OrsoSample: data_source.Sample( name=sample["name"], model=data_source.SampleModel( - stack=sample["model"], + stack=sample.get("model", ""), ), ) ) diff --git a/src/ess/reflectometry/types.py b/src/ess/reflectometry/types.py index 14a5d51b..b14b0243 100644 --- a/src/ess/reflectometry/types.py +++ b/src/ess/reflectometry/types.py @@ -1,39 +1,27 @@ +# SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) from typing import Any, NewType, TypeVar import sciline import scipp as sc -ReferenceRun = NewType("ReferenceRun", str) -SampleRun = NewType("SampleRun", str) -RunType = TypeVar("RunType", ReferenceRun, SampleRun) - -CoordTransformationGraph = NewType("CoordTransformationGraph", dict) - - -class NeXusDetectorName(sciline.Scope[RunType, str], str): - """Name of the detector in the nexus file containing the events of the RunType""" - - -class DetectorPosition(sciline.Scope[RunType, sc.Variable], sc.Variable): - """Positions of the detector pixels, relative to the source(?), as a 3d-vector""" - +from ess.reduce.nexus import types as reduce_t -class SamplePosition(sciline.Scope[RunType, sc.Variable], sc.Variable): - """The position of the sample relative to the source(?).""" - - -class SpecularReflectionCoordTransformGraph(sciline.Scope[RunType, dict], dict): - """Coordinate transformation graph for specular reflection""" - - -class RawDetectorData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): - """Event time data from nexus file, - binned by `detector_number` (pixel of the detector frame).""" +SampleRun = reduce_t.SampleRun +ReferenceRun = NewType("ReferenceRun", int) +RunType = 
TypeVar("RunType", ReferenceRun, SampleRun) +Beamline = reduce_t.Beamline +CalibratedDetector = reduce_t.CalibratedDetector +DetectorData = reduce_t.DetectorData +DetectorPositionOffset = reduce_t.DetectorPositionOffset +Filename = reduce_t.Filename +Measurement = reduce_t.Measurement +NeXusComponent = reduce_t.NeXusComponent +NeXusDetectorName = reduce_t.NeXusDetectorName +Position = reduce_t.Position -class LoadedNeXusDetector(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup): - """NXdetector loaded from file""" +CoordTransformationGraph = NewType("CoordTransformationGraph", dict) class ReducibleData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): @@ -75,10 +63,6 @@ class ThetaBins(sciline.Scope[RunType, sc.Variable], sc.Variable): detector pixels have the same theta value.""" -class Filename(sciline.Scope[RunType, str], str): - """Filename of an event data nexus file.""" - - class RawSampleRotation(sciline.Scope[RunType, sc.Variable], sc.Variable): """The rotation of the sample registered in the NeXus file.""" diff --git a/tests/estia/mcstas_data_test.py b/tests/estia/mcstas_data_test.py index 79ab490f..20fb7c9c 100644 --- a/tests/estia/mcstas_data_test.py +++ b/tests/estia/mcstas_data_test.py @@ -11,7 +11,7 @@ import scipp as sc from orsopy import fileio -from ess.estia import EstiaWorkflow +from ess.estia import EstiaMcStasWorkflow from ess.estia.data import estia_mcstas_reference_run, estia_mcstas_sample_run from ess.estia.load import load_mcstas_events from ess.reflectometry import orso @@ -32,7 +32,7 @@ @pytest.fixture def estia_mcstas_pipeline() -> sciline.Pipeline: - wf = EstiaWorkflow() + wf = EstiaMcStasWorkflow() wf.insert(load_mcstas_events) wf[Filename[ReferenceRun]] = estia_mcstas_reference_run()