src/ess/amor/conversions.py (15 additions, 11 deletions)
@@ -149,23 +149,27 @@ def add_masks(
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
     wbins: WavelengthBins,
-):
+) -> sc.DataArray:
     """
     Masks the data by ranges in the detector
     coordinates ``z`` and ``y``, by the divergence of the beam,
     and by wavelength.
     """
-    da.masks["stripe_range"] = _not_between(da.coords["stripe"], *ylim)
-    da.masks['z_range'] = _not_between(da.coords["z_index"], *zlims)
-    da.bins.masks["divergence_too_large"] = _not_between(
-        da.bins.coords["angle_of_divergence"],
-        bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
-        bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+    da = da.assign_masks(
+        stripe_range=_not_between(da.coords["stripe"], *ylim),
+        z_range=_not_between(da.coords["z_index"], *zlims),
     )
-    da.bins.masks['wavelength'] = _not_between(
-        da.bins.coords['wavelength'],
-        wbins[0],
-        wbins[-1],
+    da = da.bins.assign_masks(
+        divergence_too_large=_not_between(
+            da.bins.coords["angle_of_divergence"],
+            bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+            bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+        ),
+        wavelength=_not_between(
+            da.bins.coords['wavelength'],
+            wbins[0],
+            wbins[-1],
+        ),
     )
     return da

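A note on the refactor above: unlike writing into da.masks[...] in place, assign_masks and bins.assign_masks return a new data array and leave their input untouched. A minimal sketch with made-up data (the names here are illustrative, not from the PR):

import scipp as sc

da = sc.DataArray(
    sc.ones(dims=['z'], shape=[4]),
    coords={'z_index': sc.arange('z', 4)},
)
masked = da.assign_masks(z_range=da.coords['z_index'] > sc.scalar(2))
assert 'z_range' in masked.masks
assert 'z_range' not in da.masks  # the original is left unmodified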
src/ess/amor/load.py (18 additions, 2 deletions)
@@ -9,6 +9,7 @@
     Filename,
     LoadedNeXusDetector,
     NeXusDetectorName,
+    ProtonCurrent,
     RawDetectorData,
     RunType,
     SampleRotation,
@@ -43,10 +44,15 @@ def load_events(
     beam_size: BeamSize[RunType],
     angle_to_center_of_beam: AngleCenterOfIncomingToHorizon[RunType],
 ) -> RawDetectorData[RunType]:
+    event_data = detector["data"]
+    if 'event_time_zero' in event_data.coords:
+        event_data.bins.coords['event_time_zero'] = sc.bins_like(
+            event_data, fill_value=event_data.coords['event_time_zero']
+        )
+
     detector_numbers = pixel_coordinates_in_detector_system()
     data = (
-        detector["data"]
-        .bins.constituents["data"]
+        event_data.bins.constituents["data"]
         .group(detector_numbers.data.flatten(to='event_id'))
         .fold("event_id", sizes=detector_numbers.sizes)
     )
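The sc.bins_like call introduced above broadcasts the per-pulse event_time_zero coordinate to every event in the corresponding bin. A minimal sketch with hypothetical pulse sizes and timestamps:

import scipp as sc

table = sc.DataArray(sc.ones(dims=['event'], shape=[5]))
binned = sc.DataArray(
    sc.bins(
        data=table,
        dim='event',
        begin=sc.array(dims=['pulse'], values=[0, 2], unit=None),
    )
)
binned.coords['event_time_zero'] = sc.array(
    dims=['pulse'], values=[10, 20], dtype='datetime64', unit='ns'
)
# Fill every event in a pulse with that pulse's timestamp.
binned.bins.coords['event_time_zero'] = sc.bins_like(
    binned, fill_value=binned.coords['event_time_zero']
)
# The two events of the first pulse carry its timestamp; the remaining
# three events carry the second pulse's timestamp.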
@@ -126,6 +132,15 @@ def load_amor_angle_from_horizon_to_center_of_incident_beam(
     )


+def load_amor_proton_current(
+    fp: Filename[RunType],
+) -> ProtonCurrent[RunType]:
+    (pc,) = load_nx(fp, 'NXentry/NXinstrument/NXdetector/proton_current')
+    pc = pc['value']['dim_1', 0]
+    pc.data.unit = 'mA/s'
+    return pc
+
+
 providers = (
     load_detector,
     load_events,
@@ -136,5 +151,6 @@ def load_amor_angle_from_horizon_to_center_of_incident_beam(
     load_amor_sample_rotation,
     load_amor_detector_rotation,
     load_amor_angle_from_horizon_to_center_of_incident_beam,
+    load_amor_proton_current,
     amor_chopper,
 )
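For reference, load_amor_proton_current is expected to return a 1-D time series. The sketch below is hand-built with made-up values (mirroring the array the test at the bottom constructs), not the loader's actual output:

import scipp as sc

proton_current_log = sc.DataArray(
    sc.array(dims=['time'], values=[1.0, 2.0, 0.1], unit='mA/s'),
    coords={
        'time': sc.array(
            dims=['time'],
            values=[0, 1_000_000, 2_000_000],
            dtype='datetime64',
            unit='ns',
        )
    },
)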
src/ess/amor/workflow.py (17 additions, 3 deletions)
@@ -1,6 +1,11 @@
-from ..reflectometry.corrections import correct_by_footprint
+from ..reflectometry.conversions import (
+    add_proton_current_coord,
+    add_proton_current_mask,
+)
+from ..reflectometry.corrections import correct_by_footprint, correct_by_proton_current
 from ..reflectometry.types import (
     BeamDivergenceLimits,
+    ProtonCurrent,
     RawDetectorData,
     ReducibleData,
     RunType,
@@ -18,6 +23,7 @@ def add_coords_masks_and_apply_corrections(
     zlims: ZIndexLimits,
     bdlim: BeamDivergenceLimits,
     wbins: WavelengthBins,
+    proton_current: ProtonCurrent[RunType],
     graph: CoordTransformationGraph,
 ) -> ReducibleData[RunType]:
     """
@@ -26,8 +32,16 @@ def add_coords_masks_and_apply_corrections(
     """
     da = add_coords(da, graph)
     da = add_masks(da, ylim, zlims, bdlim, wbins)
-    correct_by_footprint(da)
-    return da
+
+    da = correct_by_footprint(da)
+
+    # For some older Amor files there are no entries in the proton current log
+    if len(proton_current) != 0:
+        da = add_proton_current_coord(da, proton_current)
+        da = add_proton_current_mask(da)
+        da = correct_by_proton_current(da)
+
+    return ReducibleData[RunType](da)


 providers = (add_coords_masks_and_apply_corrections,)
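The len(proton_current) != 0 guard handles older Amor files whose proton-current log is empty; len of a 1-D data array is its number of entries. A small sketch, assuming an empty log is representable like this:

import scipp as sc

empty_log = sc.DataArray(
    sc.array(dims=['time'], values=[], unit='mA/s'),
    coords={
        'time': sc.array(dims=['time'], values=[], dtype='datetime64', unit='ns')
    },
)
assert len(empty_log) == 0  # the proton-current steps are skipped in this case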
src/ess/reflectometry/conversions.py (35 additions, 0 deletions)
@@ -4,6 +4,8 @@
 from scipp.constants import pi
 from scippneutron._utils import elem_dtype

+from .types import ProtonCurrent, RunType
+

 def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
     """
@@ -26,4 +28,37 @@ def reflectometry_q(wavelength: sc.Variable, theta: sc.Variable) -> sc.Variable:
     return c * sc.sin(theta.astype(dtype, copy=False)) / wavelength


+def add_proton_current_coord(
+    da: sc.DataArray,
+    pc: ProtonCurrent[RunType],
+) -> sc.DataArray:
+    """Finds the proton current value for each event and
+    adds it as a coord to the data array."""
+    pc_lookup = sc.lookup(
+        pc,
+        dim='time',
+        mode='previous',
+        fill_value=sc.scalar(float('nan'), unit=pc.unit),
+    )
+    # Useful for comparing the proton current to what is typical
+    da = da.assign_coords(median_proton_current=sc.median(pc).data)
+    da.coords.set_aligned('median_proton_current', False)
+    da = da.bins.assign_coords(
+        proton_current=pc_lookup(da.bins.coords['event_time_zero'])
+    )
+    return da
+
+
+def add_proton_current_mask(da: sc.DataArray) -> sc.DataArray:
+    """Masks events where the proton current was too low or
+    where the proton current is NaN."""
+    # Take the inverse and use >= because we want to mask NaN values
+    da = da.bins.assign_masks(
+        proton_current_too_low=~(
+            da.bins.coords['proton_current'] >= da.coords['median_proton_current'] / 4
+        )
+    )
+    return da
+
+
 providers = ()
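The sc.lookup table in add_proton_current_coord maps each event timestamp to the most recent log entry at or before it (mode='previous'); timestamps from before the first log entry receive the NaN fill_value. A minimal sketch with made-up timestamps, assuming the lookup can be called on a dense array of times just as on the binned coordinate used above:

import scipp as sc

pc = sc.DataArray(
    sc.array(dims=['time'], values=[1.0, 2.0], unit='mA/s'),
    coords={
        'time': sc.array(dims=['time'], values=[100, 200], dtype='datetime64', unit='ns')
    },
)
lut = sc.lookup(pc, dim='time', mode='previous',
                fill_value=sc.scalar(float('nan'), unit='mA/s'))
events = sc.array(dims=['event'], values=[150, 250, 50], dtype='datetime64', unit='ns')
current = lut(events)
# current.values -> [1.0, 2.0, nan]: the entry at t=100 covers t=150,
# the one at t=200 covers t=250, and t=50 predates the log entirely.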
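The inverted comparison in add_proton_current_mask is deliberate: NaN compares false against everything, so ~(x >= threshold) is True (masked) for NaN values, while the seemingly equivalent x < threshold would leave them unmasked:

import numpy as np
import scipp as sc

x = sc.array(dims=['event'], values=[1.0, 10.0, np.nan])
threshold = sc.scalar(2.0)
print((~(x >= threshold)).values)  # [ True False  True] -> NaN is masked
print((x < threshold).values)      # [ True False False] -> NaN slips through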
src/ess/reflectometry/corrections.py (7 additions, 2 deletions)
@@ -30,10 +30,15 @@ def footprint_on_sample(
     return sc.erf(fwhm_to_std(sample_size / size_of_beam_on_sample))


-def correct_by_footprint(da: sc.DataArray) -> None:
+def correct_by_footprint(da: sc.DataArray) -> sc.DataArray:
     "Corrects the data by the size of the footprint on the sample."
-    da /= footprint_on_sample(
+    return da / footprint_on_sample(
         da.bins.coords['theta'],
         da.coords['beam_size'],
         da.coords['sample_size'],
     )
+
+
+def correct_by_proton_current(da: sc.DataArray) -> sc.DataArray:
+    "Corrects the data by the proton current during the time of data collection."
+    return da / da.bins.coords['proton_current']
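correct_by_proton_current divides binned data by a per-event coordinate, so each event weight is rescaled individually by the current at its timestamp. A sketch with made-up numbers:

import scipp as sc

table = sc.DataArray(sc.ones(dims=['event'], shape=[4]))
da = sc.DataArray(
    sc.bins(
        data=table,
        dim='event',
        begin=sc.array(dims=['pixel'], values=[0, 2], unit=None),
    )
)
da.bins.coords['proton_current'] = sc.bins_like(
    da, fill_value=sc.array(dims=['pixel'], values=[1.0, 0.5], unit='mA/s')
)
normalized = da / da.bins.coords['proton_current']
# Events recorded at 0.5 mA/s end up with twice the weight of those at 1.0 mA/s.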
src/ess/reflectometry/types.py (4 additions, 0 deletions)
@@ -93,6 +93,10 @@ class SampleSize(sciline.Scope[RunType, sc.Variable], sc.Variable):
     """Size of the sample. If None it is assumed to be the same as the reference."""


+class ProtonCurrent(sciline.Scope[RunType, sc.DataArray], sc.DataArray):
+    """Proton current log from file"""
+
+
 YIndexLimits = NewType("YIndexLimits", tuple[sc.Variable, sc.Variable])
 """Limit of the (logical) 'y' detector pixel index"""
tests/amor/pipeline_test.py (43 additions, 0 deletions)
@@ -14,7 +14,9 @@
 from ess.reflectometry import orso
 from ess.reflectometry.types import (
     Filename,
+    ProtonCurrent,
     QBins,
+    ReducibleData,
     ReferenceRun,
     ReflectivityOverQ,
     SampleRotation,
@@ -127,3 +129,44 @@ def test_pipeline_merging_events_result_unchanged(amor_pipeline: sciline.Pipeline):
     assert_allclose(
         2 * sc.variances(result.data), sc.variances(result2.data), rtol=sc.scalar(1e-6)
     )
+
+
+@pytest.mark.filterwarnings("ignore:Failed to convert .* into a transformation")
+@pytest.mark.filterwarnings("ignore:Invalid transformation, missing attribute")
+def test_proton_current(amor_pipeline: sciline.Pipeline):
+    amor_pipeline[Filename[SampleRun]] = amor.data.amor_sample_run(611)
+    da_without_proton_current = amor_pipeline.compute(ReducibleData[SampleRun])
+
+    proton_current = [1, 2, 0.1]
+    timestamps = [1699883542349602112, 1699883542349602112, 1699886864071691036]
+    amor_pipeline[ProtonCurrent[SampleRun]] = sc.DataArray(
+        sc.array(dims=['time'], values=proton_current),
+        coords={
+            'time': sc.array(
+                dims=['time'],
+                values=timestamps,
+                dtype='datetime64',
+                unit='ns',
+            )
+        },
+    )
+    da_with_proton_current = amor_pipeline.compute(ReducibleData[SampleRun])
+
+    assert "proton_current" in da_with_proton_current.bins.coords
+    assert "proton_current_too_low" in da_with_proton_current.bins.masks
+    assert da_with_proton_current.bins.masks["proton_current_too_low"].any()
+    assert not da_with_proton_current.bins.masks["proton_current_too_low"].all()
+
+    assert "proton_current" not in da_without_proton_current.bins.coords
+    assert "proton_current_too_low" not in da_without_proton_current.bins.masks
+
+    t = (
+        da_with_proton_current.bins.constituents['data']
+        .coords['event_time_zero'][0]
+        .value.astype('uint64')
+    )
+    w_with = da_with_proton_current.bins.constituents['data'].data[0].value
+    w_without = da_without_proton_current.bins.constituents['data'].data[0].value
+    np.testing.assert_allclose(
+        proton_current[np.searchsorted(timestamps, t) - 1], w_without / w_with
+    )