diff --git a/docs/about/bibliography.rst b/docs/about/bibliography.rst
new file mode 100644
index 00000000..0d21440d
--- /dev/null
+++ b/docs/about/bibliography.rst
@@ -0,0 +1,4 @@
+Bibliography
+============
+
+.. bibliography::
diff --git a/docs/about/index.md b/docs/about/index.md
index 9a118f61..e7291804 100644
--- a/docs/about/index.md
+++ b/docs/about/index.md
@@ -24,3 +24,11 @@ Simply download the archive, unzip and view locally in a web browser.
## Source code and development
ESSdiffraction is hosted and developed [on GitHub](https://github.com/scipp/essdiffraction).
+
+```{toctree}
+---
+hidden:
+---
+
+bibliography
+```
diff --git a/docs/bibliography.bib b/docs/bibliography.bib
new file mode 100644
index 00000000..60c57505
--- /dev/null
+++ b/docs/bibliography.bib
@@ -0,0 +1,8 @@
+@book{Arblaster:2018,
+ author = {John W. Arblaster},
+ year = {2018},
+ title = {Selected Values of the Crystallographic Properties of the Elements},
+ publisher = {ASM International},
+ ISBN = {978-1-62708-154-2},
+ url = {https://www.asminternational.org/selected-values-of-the-crystallographic-properties-of-the-elements/results/-/journal_content/56/39867022/PUBLICATION/}
+}
diff --git a/docs/conf.py b/docs/conf.py
index 37ffb8fa..92e52d92 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -26,6 +26,7 @@
"sphinx_autodoc_typehints",
"sphinx_copybutton",
"sphinx_design",
+ "sphinxcontrib.bibtex",
"nbsphinx",
"myst_parser",
]
@@ -257,3 +258,7 @@ def do_not_plot(*args, **kwargs):
# Specific lines in Github blobs cannot be found by linkcheck.
r"https?://github\.com/.*?/blob/[a-f0-9]+/.+?#",
]
+
+# -- Options for bibtex ---------------------------------------------------
+bibtex_bibfiles = ["bibliography.bib"]
+bibtex_reference_style = "label"
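+# Entries in bibliography.bib can then be cited with the :cite: role,
+# e.g. :cite:`Arblaster:2018` in docstrings and RST pages.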
diff --git a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
index 58664917..b44983dc 100644
--- a/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
+++ b/docs/user-guide/sns-instruments/POWGEN_data_reduction.ipynb
@@ -387,7 +387,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3"
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
}
},
"nbformat": 4,
diff --git a/docs/user-guide/sns-instruments/index.md b/docs/user-guide/sns-instruments/index.md
index 6c1321e2..9fab1e6e 100644
--- a/docs/user-guide/sns-instruments/index.md
+++ b/docs/user-guide/sns-instruments/index.md
@@ -9,4 +9,5 @@ maxdepth: 1
---
POWGEN_data_reduction
+vanadium_processing
```
diff --git a/docs/user-guide/sns-instruments/vanadium_processing.ipynb b/docs/user-guide/sns-instruments/vanadium_processing.ipynb
new file mode 100644
index 00000000..e4019d4e
--- /dev/null
+++ b/docs/user-guide/sns-instruments/vanadium_processing.ipynb
@@ -0,0 +1,381 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "fb973780-910e-48c0-9055-672182f300f9",
+ "metadata": {},
+ "source": [
+ "# Vanadium processing\n",
+ "\n",
+ "The main workflow in [POWGEN_data_reduction](./POWGEN_data_reduction.rst) misses some reduction steps for vanadium.\n",
+ "In particular, we need to remove coherent scattering peaks from vanadium data.\n",
+ "This is not part of the regular workflow as it relies on fitting.\n",
+ "And since fitting can easily break in a way that is hard to detect automatically, a human should inspect the results.\n",
+ "Additionally, vanadium measurements can be processed separately from sample measurements and saved to files.\n",
+ "The processed vanadium data can then be used in the main workflow directly.\n",
+ "\n",
+ "This notebook outlines how to process a vanadium run.\n",
+ "First, we convert the data to d-spacing using the same workflow as in [POWGEN_data_reduction](./POWGEN_data_reduction.rst)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b38ce909-c9ec-4d75-8060-3b9e3094ef78",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import scipp as sc\n",
+ "import scippneutron as scn\n",
+ "import scippneutron.peaks\n",
+ "\n",
+ "from ess import powder\n",
+ "from ess.snspowder import powgen\n",
+ "from ess.powder.types import *"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "bf03dd14-a096-4cf0-8f0b-c318eb507391",
+ "metadata": {},
+ "source": [
+ "Use the same parameters as in the main workflow except with more d-spacing bins.\n",
+ "We need the high d-spacing resolution later when removing coherent scattering peaks."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "01153a77-1710-41fc-a78c-20a8bae40831",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "workflow = powgen.PowgenWorkflow()\n",
+ "\n",
+ "# Use a large number of bins.\n",
+ "workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.0, 2.3434, 5001, unit=\"angstrom\")\n",
+ "\n",
+ "workflow[Filename[SampleRun]] = powgen.data.powgen_tutorial_sample_file()\n",
+ "workflow[Filename[VanadiumRun]] = powgen.data.powgen_tutorial_vanadium_file()\n",
+ "workflow[CalibrationFilename] = powgen.data.powgen_tutorial_calibration_file()\n",
+ "\n",
+ "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n",
+ "\n",
+ "workflow[TofMask] = lambda x: (x < sc.scalar(0.0, unit=\"us\")) | (x > sc.scalar(16666.67, unit=\"us\"))\n",
+ "workflow[TwoThetaMask] = None\n",
+ "workflow[WavelengthMask] = None\n",
+ "\n",
+ "workflow = powder.with_pixel_mask_filenames(workflow, [])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "12ac6517-a73c-493b-b2ce-f7ffe13a2060",
+ "metadata": {},
+ "source": [
+ "Compute a single vanadium spectrum in d-spacing:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "646a1117-3a71-4893-b2a2-13675cf749c7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peaked_data = workflow.compute(FocussedDataDspacing[VanadiumRun])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8de3d6e0-b8ec-4a25-bcb0-b0d2b19b8a61",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peaked_data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "04f7f83c-8608-458d-b4a9-2f9853c09cd7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peaked_data.hist().plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "84f8b41c-e3f3-4282-83c6-e0e424bc5632",
+ "metadata": {},
+ "source": [
+ "## Removing coherent scattering peaks\n",
+ "\n",
+ "As the variable name `peaked_data` implies, the produced spectrum contains peaks from coherent scattering.\n",
+ "Even though the peaks are small for vanadium, we need to remove them to extract pure incoherent scattering.\n",
+ "We can approximate the coherent scattering contribution by fitting functions to the peaks and subtracting those fitted functions.\n",
+ "[scippneutron.peaks](https://scipp.github.io/scippneutron/generated/modules/scippneutron.peaks.html) contains general functionality for fitting and removing peaks.\n",
+ "Here, we use it through [ess.snspowder.powgen.peaks](../../generated/modules/ess.snspowder.powgen.peaks.rst) which provides useful defaults for vanadium peaks at POWGEN.\n",
+ "For example, it selects appropriate models for peaks (gaussian) and backgrounds (linear and quadratic).\n",
+ "\n",
+ "First, define estimates for the peaks based on the known crystal structure of vanadium:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7140935e-8b32-403f-a0d8-e66293819b40",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peak_estimates = powgen.peaks.theoretical_vanadium_dspacing(\n",
+ " hkl_range=7, min_d=sc.scalar(0.41, unit='angstrom')\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6e530963-258e-475f-9128-2417bc8d7e24",
+ "metadata": {},
+ "source": [
+ "We need to histogram the data to perform fits:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d6bcd10f-130e-43e0-8231-1f0b70e8bc53",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peak_histogram = peaked_data.hist()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1c4ff398-80d4-4452-92d7-3d3c8d1359dc",
+ "metadata": {},
+ "source": [
+ "The fits require a bin-center coordinate, so convert from bin-edges:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e661fda9-1de4-4d26-a8d7-c84cec10decd",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "to_fit = peak_histogram.copy(deep=False)\n",
+ "to_fit.coords['dspacing'] = sc.midpoints(to_fit.coords['dspacing'])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b0bbf51f-307f-43c5-8093-ff6606049a1f",
+ "metadata": {},
+ "source": [
+ "Perform the fits:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "49f574d8-de6f-4c69-9704-6ce4acb9bdb3",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "fit_results = powgen.peaks.fit_vanadium_peaks(to_fit, peak_estimates=peak_estimates)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "19251c69-75eb-4dd0-be97-d2272946bacd",
+ "metadata": {},
+ "source": [
+ "Remove the fitted peaks to obtain the incoherent scattering.\n",
+ "Also restore the bin-edge coordinate that we had to replace temporarily for the fits.\n",
+ "\n",
+ "Importantly, we remove variances from the data.\n",
+ "If we kept the variances, subtracting the fitted models would introduce correlations between the data points.\n",
+ "This corresponds to [UncertaintyBroadcastMode.drop](../../generated/modules/ess.powder.types.UncertaintyBroadcastMode.rst) in the main workflow.\n",
+ "See also the [guide in ESSreduce](https://scipp.github.io/essreduce/user-guide/reduction-workflow-guidelines.html#s-8-propagation-of-uncertainties-in-broadcast-operations-should-support-drop-and-upper-bound-strategies-upper-bound-shall-be-the-default)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fbcde35f-8608-4ecb-9b6d-67f686d7839a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "incoherent = scn.peaks.remove_peaks(sc.values(to_fit), fit_results)\n",
+ "incoherent.coords['dspacing'] = peak_histogram.coords['dspacing']\n",
+ "incoherent.plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6f19a754-c41a-4507-9ac5-a9f68dbab60f",
+ "metadata": {},
+ "source": [
+ "We can further inspect the results.\n",
+ "Below, there is a function that plots\n",
+ "\n",
+ "- the data with coherent and incoherent scattering (blue),\n",
+ "- the resulting incoherent curve (green),\n",
+ "- the fitted models (orange),\n",
+ "- the fit windows (gray and red boxes),\n",
+ "- and the initial estimates (dashed vertical lines).\n",
+ "\n",
+ "Some fits failed as indicated by red boxes and short descriptions of why the fits failed.\n",
+ "Some peaks are absent from the data used here, even though they are expected based on the crystal structure.\n",
+ "So those fits are expected to fail.\n",
+ "All other fits appear to have succeeded.\n",
+ "\n",
+ "See [scippneutron.peaks.fit_peaks](https://scipp.github.io/scippneutron/generated/modules/scippneutron.peaks.fit_peaks.html) for options to customize the fit procedure if it does not work as desired.\n",
+ "\n",
+ "
\n",
+ "\n",
+ "**Note**\n",
+ "\n",
+ "It is highly recommended to inspect the plot in detail to check whether all fits have succeeded or failed as expected!\n",
+ "Fitting is not always reliable and may fail for many reasons.\n",
+ "You can make plots interactive by using `%matplotlib widget`.\n",
+ "\n",
+ "
"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0fd4e65a-35b3-4c12-a4ee-24649bda2551",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def peak_removal_diagnostic(\n",
+ " data: sc.DataArray,\n",
+ " removed: sc.DataArray,\n",
+ " fit_results: list[scn.peaks.FitResult],\n",
+ " peak_estimates: sc.Variable,\n",
+ " *,\n",
+ " xlim: tuple[sc.Variable, sc.Variable] | None=None,\n",
+ "):\n",
+ " if xlim is not None:\n",
+ " def in_range(x: sc.Variable) -> bool:\n",
+ " return sc.isfinite(x) and (xlim[0] <= x) and (x < xlim[1])\n",
+ " data = data[data.dim, xlim[0]:xlim[1]]\n",
+ " removed = removed[removed.dim, xlim[0]:xlim[1]]\n",
+ " fit_results, peak_estimates = zip(*(\n",
+ " (r, e)\n",
+ " for r, e in zip(fit_results, peak_estimates, strict=True)\n",
+ " if in_range(r.window[0]) and in_range(r.window[1])\n",
+ " ), strict=True)\n",
+ "\n",
+ " # The actual data\n",
+ " fig = data.plot(c=\"C0\")\n",
+ " ax = fig.ax\n",
+ " removed.plot(ax=ax, c='C2')\n",
+ "\n",
+ " # Initial estimates\n",
+ " for estimate, result in zip(peak_estimates, fit_results, strict=True):\n",
+ " ax.axvline(\n",
+ " x=estimate.value,\n",
+ " color=\"black\" if result.success else \"red\",\n",
+ " alpha=0.5,\n",
+ " lw=1,\n",
+ " ls=\":\",\n",
+ " )\n",
+ "\n",
+ " # Fit windows\n",
+ " for result in fit_results:\n",
+ " left = result.window[0]\n",
+ " right = result.window[1]\n",
+ " sl = data[data.dim, left:right]\n",
+ " lo = sl.min().value * 0.95\n",
+ " hi = sl.max().value * 1.05\n",
+ " ax.fill_betweenx(\n",
+ " (lo, hi),\n",
+ " left.value,\n",
+ " right.value,\n",
+ " facecolor=\"black\" if result.success else \"red\",\n",
+ " alpha=0.2,\n",
+ " )\n",
+ " if not result.success:\n",
+ " ax.text(left.value, hi, result.message.split(\":\", 1)[0])\n",
+ "\n",
+ " # Overlay with fit models evaluated at optimized parameters\n",
+ " for result in fit_results:\n",
+ " if all(not sc.isnan(param).value for param in result.popt.values()):\n",
+ " best_fit = data[data.dim, result.window[0] : result.window[1]].copy(deep=False)\n",
+ " best_fit.coords[best_fit.dim] = sc.midpoints(best_fit.coords[best_fit.dim])\n",
+ " best_fit.data = result.eval_model(best_fit.coords[best_fit.dim])\n",
+ " best_fit.plot(ax=ax, c=\"C1\", ls=\"-\", marker=\"none\")\n",
+ "\n",
+ " return fig"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bee3ae5a-ae4c-41a8-901b-b0fada19504c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peak_removal_diagnostic(\n",
+ " peak_histogram,\n",
+ " incoherent,\n",
+ " fit_results,\n",
+ " peak_estimates,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fa420140-073c-4104-b9aa-990190f9bd4f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "peak_removal_diagnostic(\n",
+ " peak_histogram,\n",
+ " incoherent,\n",
+ " fit_results,\n",
+ " peak_estimates,\n",
+ " xlim=(0.37 * sc.Unit(\"Å\"), 0.56 * sc.Unit(\"Å\")),\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6bab60d1-1ea9-4481-9f5c-1d55423766c7",
+ "metadata": {},
+ "source": [
+ "The resulting data array `incoherent` can be saved and used in the main workflow [POWGEN_data_reduction](./POWGEN_data_reduction.rst) to replace `FocussedDataDspacing[VanadiumRun]`."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/pyproject.toml b/pyproject.toml
index b0fefc7d..63c82a58 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,7 +38,7 @@ dependencies = [
"pythreejs",
"sciline>=24.06.0",
"scipp>=23.8.0",
- "scippneutron>=23.9.0",
+ "scippneutron>=24.5.0",
"scippnexus>=23.12.0",
]
@@ -84,7 +84,7 @@ ignore = [
"COM812", "COM819", "D206", "D300", "E111", "E114", "E117", "ISC001", "ISC002", "Q000", "Q001", "Q002", "Q003", "W191",
]
fixable = ["I001", "B010"]
-isort.known-first-party = ["essdiffraction"]
+isort.known-first-party = ["ess.diffraction", "ess.dream", "ess.powder"]
pydocstyle.convention = "numpy"
[tool.ruff.lint.per-file-ignores]
diff --git a/requirements/base.in b/requirements/base.in
index cc9d0e4e..eba882d8 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -10,5 +10,5 @@ plopp
pythreejs
sciline>=24.06.0
scipp>=23.8.0
-scippneutron>=23.9.0
+scippneutron>=24.5.0
scippnexus>=23.12.0
diff --git a/requirements/base.txt b/requirements/base.txt
index 63c63123..942dc4ed 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,4 +1,4 @@
-# SHA1:379b8589d7611e6075ce0ff7558db3c0ea3f4f7c
+# SHA1:d8ffbcd7269e8e6905a85cd09c6df5878084b9e8
#
# This file is autogenerated by pip-compile-multi
# To update, run:
diff --git a/requirements/docs.in b/requirements/docs.in
index 813dd56f..a1110dd5 100644
--- a/requirements/docs.in
+++ b/requirements/docs.in
@@ -10,6 +10,7 @@ sphinx
sphinx-autodoc-typehints
sphinx-copybutton
sphinx-design
+sphinxcontrib-bibtex
# needed by pandas < 3.0
pyarrow
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 4d317a20..b2ab1be9 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -1,4 +1,4 @@
-# SHA1:5644acd1b52f1f99fbca1349c29761b90702d03a
+# SHA1:a0b29b772e4f1fe4102ea0ecf6978eae404c635e
#
# This file is autogenerated by pip-compile-multi
# To update, run:
@@ -32,8 +32,10 @@ docutils==0.21.2
# via
# myst-parser
# nbsphinx
+ # pybtex-docutils
# pydata-sphinx-theme
# sphinx
+ # sphinxcontrib-bibtex
fastjsonschema==2.20.0
# via nbformat
imagesize==1.4.1
@@ -67,6 +69,8 @@ jupyter-core==5.7.2
# nbformat
jupyterlab-pygments==0.3.0
# via nbconvert
+latexcodec==3.0.0
+ # via pybtex
markdown-it-py==3.0.0
# via
# mdit-py-plugins
@@ -100,10 +104,16 @@ pandas==2.2.2
# via -r docs.in
pandocfilters==1.5.1
# via nbconvert
-psutil==5.9.8
+psutil==6.0.0
# via ipykernel
pyarrow==16.1.0
# via -r docs.in
+pybtex==0.24.0
+ # via
+ # pybtex-docutils
+ # sphinxcontrib-bibtex
+pybtex-docutils==1.0.3
+ # via sphinxcontrib-bibtex
pydata-sphinx-theme==0.15.3
# via -r docs.in
pytz==2024.1
@@ -133,6 +143,7 @@ sphinx==7.3.7
# sphinx-autodoc-typehints
# sphinx-copybutton
# sphinx-design
+ # sphinxcontrib-bibtex
sphinx-autodoc-typehints==2.1.1
# via -r docs.in
sphinx-copybutton==0.5.2
@@ -141,6 +152,8 @@ sphinx-design==0.6.0
# via -r docs.in
sphinxcontrib-applehelp==1.0.8
# via sphinx
+sphinxcontrib-bibtex==2.6.2
+ # via -r docs.in
sphinxcontrib-devhelp==1.0.6
# via sphinx
sphinxcontrib-htmlhelp==2.0.5
diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py
index cae2fa96..92081ab4 100644
--- a/src/ess/dream/io/geant4.py
+++ b/src/ess/dream/io/geant4.py
@@ -6,6 +6,8 @@
import numpy as np
import sciline
import scipp as sc
+from ess.reduce.nexus import extract_detector_data
+
from ess.powder.types import (
CalibrationData,
CalibrationFilename,
@@ -22,7 +24,6 @@
SampleRun,
SourcePosition,
)
-from ess.reduce.nexus import extract_detector_data
MANTLE_DETECTOR_ID = sc.index(7)
HIGH_RES_DETECTOR_ID = sc.index(8)
diff --git a/src/ess/dream/io/nexus.py b/src/ess/dream/io/nexus.py
index 3de86b88..98fb618f 100644
--- a/src/ess/dream/io/nexus.py
+++ b/src/ess/dream/io/nexus.py
@@ -14,6 +14,8 @@
"""
import scipp as sc
+from ess.reduce import nexus
+
from ess.powder.types import (
Filename,
LoadedNeXusDetector,
@@ -26,7 +28,6 @@
SamplePosition,
SourcePosition,
)
-from ess.reduce import nexus
DETECTOR_BANK_SIZES = {
"endcap_backward_detector": {
diff --git a/src/ess/dream/workflow.py b/src/ess/dream/workflow.py
index ffd91093..0b7b8291 100644
--- a/src/ess/dream/workflow.py
+++ b/src/ess/dream/workflow.py
@@ -3,6 +3,7 @@
import sciline
import scipp as sc
+
from ess.powder import providers as powder_providers
from ess.powder.types import (
AccumulatedProtonCharge,
diff --git a/src/ess/snspowder/powgen/__init__.py b/src/ess/snspowder/powgen/__init__.py
index f175f21b..36af63d0 100644
--- a/src/ess/snspowder/powgen/__init__.py
+++ b/src/ess/snspowder/powgen/__init__.py
@@ -7,7 +7,7 @@
the ``dream`` module when that is available.
"""
-from . import beamline, data
+from . import beamline, data, peaks
from .calibration import load_calibration
from .instrument_view import instrument_view
from .workflow import PowgenWorkflow, default_parameters
@@ -26,5 +26,6 @@
'default_parameters',
'instrument_view',
'load_calibration',
+ 'peaks',
'providers',
]
diff --git a/src/ess/snspowder/powgen/peaks.py b/src/ess/snspowder/powgen/peaks.py
new file mode 100644
index 00000000..86a5cad7
--- /dev/null
+++ b/src/ess/snspowder/powgen/peaks.py
@@ -0,0 +1,134 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
+"""Peak fitting and removal.
+
+This module is specialized to POWGEN.
+"""
+
+import math
+from collections.abc import Iterable
+from itertools import combinations_with_replacement
+
+import scipp as sc
+from scippneutron.peaks import FitParameters, FitRequirements, FitResult, fit_peaks
+from scippneutron.peaks.model import Model
+
+
+def theoretical_vanadium_dspacing(
+ *, hkl_range: int = 10, min_d: sc.Variable | None = None
+) -> sc.Variable:
+ r"""Return the d-spacing values for vanadium in an ideal case.
+
+ Based on the bcc structure of vanadium, the values are
+
+ .. math::
+
+ d = \frac{a}{\sqrt{h^2 + k^2 + l^2}}
+
+ where :math:`a = 3.0272` Å is the lattice constant
+ of vanadium :cite:`Arblaster:2018` and :math:`h+k+l` is even.
+
+ Parameters
+ ----------
+ hkl_range:
+ h, k, l are each limited to the integer interval ``[0, hkl_range]``.
+ min_d:
+ If given, only return values greater than this.
+
+ Returns
+ -------
+ :
+ Array of vanadium d-spacing values.
+ Has dimension ``'dspacing'``.
+ """
+ a = 3.0272
+ d_values = {
+ a / math.sqrt(h**2 + k**2 + l**2)
+ for h, k, l in combinations_with_replacement(range(hkl_range), 3) # noqa: E741
+ if (h + k + l) % 2 == 0 and (h + k + l) > 0
+ }
+ d = sc.array(dims=['dspacing'], values=sorted(d_values), unit='angstrom')
+ if min_d is not None:
+ return d[d > min_d]
+ return d
+
+
+def fit_vanadium_peaks(
+ data: sc.DataArray,
+ *,
+ peak_estimates: sc.Variable | None = None,
+ windows: sc.Variable | None = None,
+ background: Model | str | Iterable[Model] | Iterable[str] | None = None,
+ peak: Model | str | Iterable[Model] | Iterable[str] | None = None,
+ fit_parameters: FitParameters | None = None,
+ fit_requirements: FitRequirements | None = None,
+) -> list[FitResult]:
+ """Fit coherent scattering peaks of vanadium.
+
+ This function wraps :func:`scippneutron.peaks.fit_peaks` and provides
+ default parameters for vanadium at POWGEN.
+
+ Parameters
+ ----------
+ data:
+ A 1d data array where ``data.data`` is the dependent variable
+ and ``data.coords[data.dim]`` is the independent variable for the fit.
+ Must be 1-dimensional and not binned.
+ peak_estimates:
+ Initial estimates of peak locations.
+ A peak will be fitted for each estimate.
+ Must be a 1d variable with dimension ``data.dim``.
+ If ``None``, estimates are derived using :func:`theoretical_vanadium_dspacing`.
+ windows:
+ If a scalar, the size of fit windows.
+ A window is constructed for each peak estimate centered on the estimate
+ with a width equal to ``windows`` (adjusted to the data range and to maintain
+ a separation between peaks, see
+ :attr:`scippneutron.peaks.FitParameters.neighbor_separation_factor`).
+
+ If a 2d array, the windows for each peak.
+ Must have sizes ``{data.dim: len(data), 'range': 2}`` where
+ ``windows['range', 0]`` and ``windows['range', 1]`` are the lower and upper
+ bounds of the fit windows, respectively.
+ The windows are not adjusted automatically in this case.
+
+ Defaults to ``sc.scalar(0.02, unit='angstrom')``.
+ background:
+ The background model or models.
+ Defaults to ``('linear', 'quadratic')``.
+ That is, a fit with a linear background is attempted, and if the fit fails,
+ a quadratic background is tried.
+ peak:
+ The peak model or models.
+ Defaults to ``'gaussian'``.
+ fit_parameters:
+ Parameters for the fit not otherwise listed as function arguments.
+ fit_requirements:
+ Constraints on the fit result.
+
+ Returns
+ -------
+ :
+ A :class:`FitResult` for each peak.
+ """
+ if peak_estimates is None:
+ peak_estimates = theoretical_vanadium_dspacing(
+ hkl_range=10, min_d=sc.scalar(0.41, unit='angstrom')
+ )
+ if windows is None:
+ windows = sc.scalar(0.02, unit='angstrom')
+ if background is None:
+ background = ('linear', 'quadratic')
+ if peak is None:
+ peak = 'gaussian'
+
+ fits = fit_peaks(
+ data,
+ peak_estimates=peak_estimates,
+ windows=windows,
+ background=background,
+ peak=peak,
+ fit_parameters=fit_parameters,
+ fit_requirements=fit_requirements,
+ )
+ return fits
diff --git a/src/ess/snspowder/powgen/workflow.py b/src/ess/snspowder/powgen/workflow.py
index 96af8743..64a7dc2e 100644
--- a/src/ess/snspowder/powgen/workflow.py
+++ b/src/ess/snspowder/powgen/workflow.py
@@ -2,6 +2,7 @@
# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
import sciline
+
from ess.powder import providers as powder_providers
from ess.powder.types import NeXusDetectorName
diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py
index b73ca75d..ad1d160f 100644
--- a/tests/dream/geant4_reduction_test.py
+++ b/tests/dream/geant4_reduction_test.py
@@ -5,6 +5,7 @@
import sciline
import scipp as sc
from ess import dream, powder
+
from ess.powder.types import (
AccumulatedProtonCharge,
CalibrationFilename,
diff --git a/tests/dream/instrument_view_test.py b/tests/dream/instrument_view_test.py
index 09682860..4e3fd46d 100644
--- a/tests/dream/instrument_view_test.py
+++ b/tests/dream/instrument_view_test.py
@@ -4,6 +4,7 @@
import numpy as np
import pytest
import scipp as sc
+
from ess.dream.instrument_view import InstrumentView
diff --git a/tests/dream/io/geant4_test.py b/tests/dream/io/geant4_test.py
index 5b78cd25..8424c9dd 100644
--- a/tests/dream/io/geant4_test.py
+++ b/tests/dream/io/geant4_test.py
@@ -10,6 +10,7 @@
import sciline
import scipp as sc
import scipp.testing
+
from ess.dream import data, load_geant4_csv
from ess.powder.types import Filename, NeXusDetectorName, RawDetectorData, SampleRun
diff --git a/tests/dream/io/nexus_test.py b/tests/dream/io/nexus_test.py
index 126197ac..762a7a86 100644
--- a/tests/dream/io/nexus_test.py
+++ b/tests/dream/io/nexus_test.py
@@ -3,6 +3,7 @@
import pytest
import sciline
from ess import dream
+
from ess.dream import nexus
from ess.powder.types import (
Filename,
diff --git a/tests/powder/conversion_test.py b/tests/powder/conversion_test.py
index ba0f9c30..d07bbee9 100644
--- a/tests/powder/conversion_test.py
+++ b/tests/powder/conversion_test.py
@@ -5,6 +5,7 @@
import scipp as sc
import scipp.testing
import scippneutron as scn
+
from ess.powder.conversion import (
add_scattering_coordinates_from_positions,
to_dspacing_with_calibration,
diff --git a/tests/powder/correction_test.py b/tests/powder/correction_test.py
index 0f731529..0c37ef16 100644
--- a/tests/powder/correction_test.py
+++ b/tests/powder/correction_test.py
@@ -4,6 +4,7 @@
import pytest
import scipp as sc
import scipp.testing
+
from ess.powder.correction import apply_lorentz_correction, merge_calibration
diff --git a/tests/powder/filtering_test.py b/tests/powder/filtering_test.py
index ff357138..3c4b9604 100644
--- a/tests/powder/filtering_test.py
+++ b/tests/powder/filtering_test.py
@@ -6,6 +6,7 @@
import numpy as np
import scipp as sc
+
from ess.powder import filtering
diff --git a/tests/snspowder/powgen/powgen_reduction_test.py b/tests/snspowder/powgen/powgen_reduction_test.py
index cbd769f3..f4f90497 100644
--- a/tests/snspowder/powgen/powgen_reduction_test.py
+++ b/tests/snspowder/powgen/powgen_reduction_test.py
@@ -5,6 +5,8 @@
import sciline
import scipp as sc
from ess import powder
+from ess.snspowder import powgen
+
from ess.powder.types import (
CalibrationFilename,
DspacingBins,
@@ -22,7 +24,6 @@
VanadiumRun,
WavelengthMask,
)
-from ess.snspowder import powgen
@pytest.fixture()
diff --git a/tox.ini b/tox.ini
index 87e882ee..5abdaa9b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -21,7 +21,11 @@ commands = pytest {posargs}
[testenv:docs]
description = invoke sphinx-build to build the HTML docs
-deps = -r requirements/docs.txt
+# setuptools is required by sphinxcontrib-bibtex but is not allowed in
+# requirements files, so we install it explicitly here.
+deps =
+ setuptools
+ -r requirements/docs.txt
allowlist_externals=find
commands = python -m sphinx -j2 -v -b html -d {toxworkdir}/docs_doctrees docs html
python -m sphinx -j2 -v -b doctest -d {toxworkdir}/docs_doctrees docs html