59 changes: 48 additions & 11 deletions src/ess/amor/load.py
@@ -7,10 +7,13 @@
from ..reflectometry.load import load_nx
from ..reflectometry.types import (
DetectorRotation,
EventData,
Filename,
LoadedNeXusDetector,
NeXusDetectorName,
ProtonCurrent,
RawDetectorData,
RawEventData,
ReducibleDetectorData,
RunType,
SampleRotation,
@@ -31,8 +34,23 @@ def load_detector(
return nexus.load_detector(file_path=file_path, detector_name=detector_name)


def load_events_binned_in_time(
detector: LoadedNeXusDetector[RunType],
) -> RawEventData[RunType]:
da = nexus.extract_detector_data(detector)
# Recent versions of scippnexus no longer add variances for events by default, so
# we add them here if they are missing.
if da.bins.constituents["data"].data.variances is None:
da.bins.constituents["data"].data.variances = da.bins.constituents[
"data"
].data.values
return RawEventData[RunType](da)


def load_events(
detector: LoadedNeXusDetector[RunType], detector_rotation: DetectorRotation[RunType]
event_data: EventData[RunType],
detector: LoadedNeXusDetector[RunType],
detector_rotation: DetectorRotation[RunType],
) -> RawDetectorData[RunType]:
detector_numbers = sc.arange(
"event_id",
@@ -42,9 +60,8 @@ def load_events(
dtype="int32",
)
data = (
nexus.extract_detector_data(detector)
.bins.constituents["data"]
.group(detector_numbers)
event_data.bins.concat()
.value.group(detector_numbers)
.fold(
"event_id",
sizes={
@@ -54,13 +71,6 @@ def load_events(
},
)
)
# Recent versions of scippnexus no longer add variances for events by default, so
# we add them here if they are missing.
if data.bins.constituents["data"].data.variances is None:
data.bins.constituents["data"].data.variances = data.bins.constituents[
"data"
].data.values

pixel_inds = sc.array(dims=data.dims, values=data.coords["event_id"].values - 1)
position, angle_from_center_of_beam = pixel_coordinate_in_lab_frame(
pixelID=pixel_inds, nu=detector_rotation
@@ -156,9 +166,35 @@ def load_amor_detector_rotation(fp: Filename[RunType]) -> DetectorRotation[RunType]:
return sc.scalar(nu['value'].data['dim_1', 0]['time', 0].value, unit='deg')


def load_proton_current(
detector: LoadedNeXusDetector[RunType],
) -> ProtonCurrent[RunType]:
pc = detector['proton_current']['value']
if pc.size == 0:
        # If the proton current log is empty, fill it with a default value
        # so that old Amor files without proton-current information remain
        # supported.
pc = ProtonCurrent[RunType](
sc.DataArray(
data=sc.array(
dims=['time', 'dim_1'],
values=[[1.0]],
),
coords={
'time': sc.array(
dims=['time'], values=[0], dtype='datetime64', unit='ns'
)
},
)
)
pc.unit = pc.unit or 'dimensionless'
return ProtonCurrent[RunType](pc['dim_1', 0])


providers = (
load_detector,
load_events,
load_events_binned_in_time,
compute_tof,
load_amor_ch_frequency,
load_amor_ch_phase,
@@ -167,4 +203,5 @@ def load_amor_detector_rotation(fp: Filename[RunType]) -> DetectorRotation[RunType]:
load_amor_sample_rotation,
load_amor_detector_rotation,
amor_chopper,
load_proton_current,
)
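
Note on load_events_binned_in_time above: for counting data the Poisson estimate sets the variance of each event weight equal to its value, which is what the variance-filling branch restores. A minimal self-contained sketch of the same fix, using made-up dummy events rather than Amor data:

import scipp as sc

# Four unit-weight events binned into two time bins; freshly created
# weights carry no variances, mirroring recent scippnexus behaviour.
events = sc.DataArray(sc.ones(dims=['event'], shape=[4], unit='counts'))
events.coords['t'] = sc.arange('event', 4.0, unit='s')
binned = events.bin(t=2)
# Poisson statistics for counts: variance == value.
buffer = binned.bins.constituents['data']
if buffer.data.variances is None:
    buffer.data.variances = buffer.data.values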
10 changes: 5 additions & 5 deletions src/ess/amor/resolution.py
@@ -4,8 +4,8 @@

from ..reflectometry.tools import fwhm_to_std
from ..reflectometry.types import (
CorrectedData,
DetectorSpatialResolution,
FootprintCorrectedData,
QBins,
QResolution,
SampleRun,
@@ -21,7 +21,7 @@


def wavelength_resolution(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
chopper_1_position: Chopper1Position[SampleRun],
chopper_2_position: Chopper2Position[SampleRun],
) -> WavelengthResolution:
@@ -53,7 +53,7 @@ def wavelength_resolution(


def sample_size_resolution(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
sample_size: SampleSize[SampleRun],
) -> SampleSizeResolution:
"""
@@ -84,7 +84,7 @@ def sample_size_resolution(


def angular_resolution(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
detector_spatial_resolution: DetectorSpatialResolution[SampleRun],
) -> AngularResolution:
"""
@@ -126,7 +126,7 @@ def angular_resolution(


def sigma_Q(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
angular_resolution: AngularResolution,
wavelength_resolution: WavelengthResolution,
sample_size_resolution: SampleSizeResolution,
20 changes: 19 additions & 1 deletion src/ess/reflectometry/conversions.py
@@ -9,9 +9,12 @@
from .types import (
BeamDivergenceLimits,
DataWithScatteringCoordinates,
EventData,
Gravity,
IncidentBeam,
MaskedData,
ProtonCurrent,
RawEventData,
ReducibleDetectorData,
RunType,
SamplePosition,
@@ -163,11 +166,26 @@ def add_masks(
da.masks["z_index_range"] = (da.coords["z_index"] < zlim[0]) | (
da.coords["z_index"] > zlim[1]
)
return da
return MaskedData[RunType](da)


def add_masks_pulse_dependent(
da: RawEventData[RunType],
pc: ProtonCurrent[RunType],
) -> EventData[RunType]:
typical_current = sc.median(pc).data
pc_lookup = sc.lookup(
pc, dim='time', mode='previous', fill_value=sc.scalar(float('nan'))
)
da.masks["proton_current_too_low"] = (
pc_lookup(da.coords['event_time_zero']) < typical_current / 2
)
return EventData[RunType](da)


providers = (
add_masks,
add_masks_pulse_dependent,
add_coords,
specular_reflection,
)
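
The new add_masks_pulse_dependent provider hinges on sc.lookup with mode='previous': each event_time_zero is mapped to the most recent proton-current log entry, with NaN filled in for pulses that precede the first entry. A small sketch of that mapping, with made-up numbers:

import scipp as sc

# Proton-current log: 100 from t=0 s, dropping to 10 at t=5 s.
pc = sc.DataArray(
    sc.array(dims=['time'], values=[100.0, 10.0]),
    coords={'time': sc.array(dims=['time'], values=[0.0, 5.0], unit='s')},
)
lut = sc.lookup(pc, dim='time', mode='previous', fill_value=sc.scalar(float('nan')))
pulse_times = sc.array(dims=['pulse'], values=[1.0, 6.0], unit='s')
current = lut(pulse_times)               # [100., 10.]
mask = current < sc.median(pc).data / 2  # [False, True]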
45 changes: 39 additions & 6 deletions src/ess/reflectometry/corrections.py
@@ -7,9 +7,14 @@
from .tools import fwhm_to_std
from .types import (
BeamSize,
FootprintCorrectedData,
CorrectedData,
EventData,
FootprintCorrection,
IdealReferenceIntensity,
MaskedData,
ProtonCurrent,
ProtonCurrentCorrection,
RawEventData,
ReferenceIntensity,
ReferenceRun,
RunType,
@@ -22,7 +27,7 @@ def footprint_correction(
data_array: MaskedData[RunType],
beam_size: BeamSize[RunType],
sample_size: SampleSize[RunType],
) -> FootprintCorrectedData[RunType]:
) -> FootprintCorrection[RunType]:
"""
Corrects the event weights by the fraction of the beam hitting the sample.
Depends on :math:`\\theta`.
@@ -44,12 +49,11 @@
"""
size_of_beam_on_sample = beam_size / sc.sin(data_array.bins.coords["theta"])
footprint_scale = sc.erf(fwhm_to_std(sample_size / size_of_beam_on_sample))
data_array_fp_correction = data_array / footprint_scale
return FootprintCorrectedData[RunType](data_array_fp_correction)
return FootprintCorrection[RunType](1 / footprint_scale)


def compute_reference_intensity(
da: FootprintCorrectedData[ReferenceRun], wb: WavelengthBins
da: CorrectedData[ReferenceRun], wb: WavelengthBins
) -> ReferenceIntensity:
"""Creates a reference intensity map over (z_index, wavelength).
Rationale:
Expand All @@ -59,7 +63,7 @@ def compute_reference_intensity(
"""
b = da.bin(wavelength=wb, dim=set(da.dims) - set(da.coords["z_index"].dims))
h = b.hist()
h.masks["too_few_events"] = h.data < sc.scalar(1, unit="counts")
h.masks["too_few_events"] = sc.stddevs(h.data) >= sc.values(h.data) / 2
# Add a Q coordinate to each bin, the Q is not completely unique in every bin,
# but it is close enough.
h.coords["Q"] = b.bins.coords["Q"].bins.mean()
@@ -74,8 +78,37 @@ def calibrate_reference(
return IdealReferenceIntensity(da * cal)


def proton_current_correction(
pc: ProtonCurrent[RunType], da: RawEventData[RunType]
) -> ProtonCurrentCorrection[RunType]:
pcl = sc.lookup(pc, dim='time', mode='previous', fill_value=sc.scalar(float('nan')))
proton_current_at_pulses = pcl(da.coords['event_time_zero'])
return ProtonCurrentCorrection[RunType](1 / proton_current_at_pulses)


def correct_detector_binned_events(
da: MaskedData[RunType], fp: FootprintCorrection[RunType]
) -> CorrectedData[RunType]:
return CorrectedData[RunType](da * fp)


def correct_pulse_binned_events(
    da: RawEventData[RunType], pcc: ProtonCurrentCorrection[RunType]
) -> EventData[RunType]:
    # If the proton current deviates from the median by more than a factor
    # of `cutoff`, the pulse is considered bad and is masked.
    cutoff = 5
    da.masks['too_low_proton_current'] = pcc > cutoff * sc.nanmedian(pcc)
    da.masks['undefined_proton_current'] = sc.isnan(pcc)
    return EventData[RunType](da * pcc)


providers = (
footprint_correction,
calibrate_reference,
compute_reference_intensity,
proton_current_correction,
correct_detector_binned_events,
correct_pulse_binned_events,
)
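
Two notes on the corrections above. First, footprint_correction now returns a multiplicative factor instead of pre-scaled data, so correct_detector_binned_events can combine it with the masked data in a single place. The factor follows from the beam footprint on the sample growing as 1/sin(theta); a sketch with hypothetical beam and sample sizes:

import scipp as sc

from ess.reflectometry.tools import fwhm_to_std

theta = sc.array(dims=['event'], values=[0.5, 1.0, 2.0], unit='deg').to(unit='rad')
beam_size = sc.scalar(2.0, unit='mm')     # beam FWHM, made up
sample_size = sc.scalar(10.0, unit='mm')  # sample length, made up
# erf of the (FWHM-to-std converted) ratio of sample size to footprint
# gives the fraction of the beam hitting the sample; invert to correct.
footprint_on_sample = beam_size / sc.sin(theta)
correction = 1 / sc.erf(fwhm_to_std(sample_size / footprint_on_sample))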
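
Second, correct_pulse_binned_events masks outlier pulses before weighting: the correction factor is the reciprocal of the proton current, so a factor far above the median signals an abnormally weak pulse. A sketch of the two masks with made-up factors:

import scipp as sc

pcc = sc.array(dims=['pulse'], values=[0.01, 0.011, 0.5, float('nan')])
cutoff = 5
# 0.5 exceeds 5x the NaN-ignoring median (0.011), so that pulse is
# masked; NaN marks pulses with no proton-current log entry at all.
too_low = pcc > cutoff * sc.nanmedian(pcc)  # [False, False, True, False]
undefined = sc.isnan(pcc)                   # [False, False, False, True]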
30 changes: 16 additions & 14 deletions src/ess/reflectometry/normalize.py
@@ -6,7 +6,7 @@
from scipy.optimize import OptimizeWarning

from .types import (
FootprintCorrectedData,
CorrectedData,
IdealReferenceIntensity,
NormalizationFactor,
NormalizedIofQ,
@@ -18,7 +18,7 @@


def normalization_factor(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
corr: IdealReferenceIntensity,
wbins: WavelengthBins,
) -> NormalizationFactor:
@@ -77,21 +77,23 @@ def q_of_z_wavelength(wavelength, a, b):
),
p0={"a": sc.scalar(-1e-3, unit="1/angstrom")},
)
return sc.DataArray(
data=corr.data,
coords={
"Q": q_of_z_wavelength(
wm,
sc.values(p["a"]),
sc.values(p["b"]),
).data,
},
masks=corr.masks,
return NormalizationFactor(
sc.DataArray(
data=corr.data,
coords={
"Q": q_of_z_wavelength(
wm,
sc.values(p["a"]),
sc.values(p["b"]),
).data,
},
masks=corr.masks,
)
)


def reflectivity_over_q(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
n: NormalizationFactor,
qbins: QBins,
) -> NormalizedIofQ:
@@ -116,7 +118,7 @@


def reflectivity_per_event(
da: FootprintCorrectedData[SampleRun],
da: CorrectedData[SampleRun],
n: IdealReferenceIntensity,
wbins: WavelengthBins,
) -> ReflectivityData:
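
The behavioural change here is only that the provider returns the declared NormalizationFactor domain type rather than a bare DataArray. A sketch of the pattern, assuming (as is usual for the non-generic types in this package, though not shown in this diff) that NormalizationFactor is a typing.NewType over sc.DataArray:

from typing import NewType

import scipp as sc

NormalizationFactor = NewType('NormalizationFactor', sc.DataArray)

def provider(corr: sc.DataArray) -> NormalizationFactor:
    # Wrapping makes the return value match the type the workflow graph
    # routes on; at runtime the object is still a plain DataArray.
    return NormalizationFactor(corr)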
4 changes: 2 additions & 2 deletions src/ess/reflectometry/orso.py
@@ -19,8 +19,8 @@
from .load import load_nx
from .supermirror import SupermirrorReflectivityCorrection
from .types import (
CorrectedData,
Filename,
FootprintCorrectedData,
ReducibleDetectorData,
ReferenceRun,
SampleRun,
@@ -175,7 +175,7 @@ def build_orso_data_source(

_CORRECTIONS_BY_GRAPH_KEY = {
ReducibleDetectorData[SampleRun]: "chopper ToF correction",
FootprintCorrectedData[SampleRun]: "footprint correction",
CorrectedData[SampleRun]: "footprint correction",
SupermirrorReflectivityCorrection: "supermirror calibration",
}
