Skip to content

Commit

Permalink
Merge 76a38c7 into 5fa734a
Browse files Browse the repository at this point in the history
  • Loading branch information
michaweiss89 committed May 11, 2023
2 parents 5fa734a + 76a38c7 commit 2d834da
Show file tree
Hide file tree
Showing 7 changed files with 224 additions and 183 deletions.
2 changes: 1 addition & 1 deletion straxen/plugins/events/event_positions.py
Expand Up @@ -64,7 +64,7 @@ def infer_dtype(self):
for j in ['z']:
comment = 'Interaction z-position, using mean drift velocity only (cm)'
dtype += [(j, np.float32, comment)]
comment = 'Interaction z-position corrected to non-uniform drift velocity [ cm ]'
comment = 'Interaction z-position corrected to non-uniform drift velocity (cm)'
dtype += [(j + "_dv_corr", np.float32, comment)]
for s_i in [1, 2]:
comment = f'Alternative S{s_i} z-position (rel. main S{int(2 * (1.5 - s_i) + s_i)}), using mean drift velocity only (cm)'
Expand Down
120 changes: 76 additions & 44 deletions straxen/plugins/events/multi_scatter.py
Expand Up @@ -3,47 +3,31 @@
import numpy as np
export, __all__ = strax.exporter()


@export
class EventInfoMS(strax.Plugin):
"""
Get the sum of s2 and s1,
Get the sum of cs2 inside the drift length.
Plugin to collect multiple-scatter event observables
"""
__version__ = '0.0.1'
depends_on = ('event_info', 'peak_basics','peaks_per_event','peaks_corrections')
provides = 'event_MS_naive'

def infer_dtype(self):
    # NOTE(review): this is the PRE-change version of infer_dtype retained by
    # the diff view; a second infer_dtype defined later in this class (with
    # descriptions and extra fields) supersedes it. Only one of the two
    # belongs in the committed file — confirm against the repository.
    dtype = []
    dtype += [
        ((f's1_sum'), np.float32),
        ((f'cs1_multi'), np.float32),
        ((f'cs1_multi_wo_timecorr'), np.float32),
        ((f's2_sum'), np.float32),
        ((f'cs2_sum'), np.float32),
        ((f'cs2_wo_timecorr_sum'), np.float32),
        ((f'cs2_wo_elifecorr_sum'), np.float32),
        ((f'cs2_area_fraction_sum'), np.float32),
        ((f'ces_sum'), np.float32),
        ((f'e_charge_sum'), np.float32),
    ]
    dtype += strax.time_fields
    return dtype

__version__ = '0.0.2'
depends_on = (
'event_info',
'peak_basics', 'peak_per_event', 'peak_corrections', 'peak_positions')
provides = 'event_ms_naive'
save_when = strax.SaveWhen.TARGET

# config options don't double cache things from the resource cache!
g1 = straxen.URLConfig(
default='bodega://g1?bodega_version=v2',
help="S1 gain in PE / photons produced",
help='S1 gain in PE / photons produced',
)
g2 = straxen.URLConfig(
default='bodega://g2?bodega_version=v2',
help="S2 gain in PE / electrons produced",
help='S2 gain in PE / electrons produced',
)
lxe_w = straxen.URLConfig(
default=13.7e-3,
help="LXe work function in quanta/keV"
help='LXe work function in quanta/keV'
)
electron_drift_velocity = straxen.URLConfig(
default='cmt://'
Expand All @@ -55,37 +39,85 @@ def infer_dtype(self):
max_drift_length = straxen.URLConfig(
default=straxen.tpc_z, infer_type=False,
help='Total length of the TPC from the bottom of gate to the '
'top of cathode wires [cm]', )
'top of cathode wires [cm]')

ms_window_fac = straxen.URLConfig(
default=1.01, type=(int, float),
help='Max drift time window to look for peaks in multiple scatter events'
)

def infer_dtype(self):
    """Return the dtype describing the multi-scatter event observables.

    All observables are float32 except ``multiplicity`` (int32); the
    standard strax time fields come first.
    """
    float_observables = [
        ('s1_sum',
         'Sum of S1 areas in event'),
        ('cs1_multi',
         'Corrected S1 area based on average position of S2s in event'),
        ('cs1_multi_wo_timecorr',
         'Corrected S1 area based on average position of S2s in event before time-dep LY correction'),
        ('s2_sum',
         'Sum of S2 areas in event'),
        ('cs2_sum',
         'Sum of corrected S2 areas in event'),
        ('cs2_wo_timecorr_sum',
         'Sum of corrected S2 areas in event S2 before elife correction'),
        ('cs2_wo_elifecorr_sum',
         'Sum of corrected S2 areas in event before SEG/EE and elife corrections'),
        ('cs2_area_fraction_top_avg',
         'Average of S2 area fraction top in event'),
        ('ces_sum',
         'Sum of the energy estimates in event'),
        ('e_charge_sum',
         'Sum of the charge estimates in event'),
        ('x_avg',
         'Average x position of S2s in event'),
        ('y_avg',
         'Average y position of S2s in event'),
        ('z_obs_avg',
         'Average observed z position of energy deposits in event'),
    ]
    dtype = strax.time_fields + [
        ((description, field_name), np.float32)
        for field_name, description in float_observables]
    # The S2 multiplicity is a count, hence an integer field.
    dtype += [(('Number of S2s in event', 'multiplicity'), np.int32)]
    return dtype

def setup(self):
    """Cache the maximum drift time (ns) for the full TPC length."""
    full_drift_ns = self.max_drift_length / self.electron_drift_velocity
    self.drift_time_max = int(full_drift_ns)

def cs1_to_e(self, x):
    """Convert a corrected S1 area (PE) to energy via the work function and g1."""
    energy = x * self.lxe_w / self.g1
    return energy

def cs2_to_e(self, x):
    """Convert a corrected S2 area (PE) to energy via the work function and g2."""
    energy = x * self.lxe_w / self.g2
    return energy


def compute(self, events, peaks):
    # NOTE(review): this span appears to be a MERGED DIFF — it contains both
    # the pre-change assignments (f-string keys, hard-coded 1.01 window,
    # np.sum) and the post-change ones (plain keys, self.ms_window_fac,
    # np.nansum). The earlier block is overwritten by the later one, and
    # result[f'cs2_area_fraction_sum'] refers to a field that is NOT in the
    # new dtype (would raise at runtime) — confirm against the committed file.
    split_peaks = strax.split_by_containment(peaks, events)
    result = np.zeros(len(events), self.infer_dtype())
    #result['s2_sum'] = np.zeros(len(events))

    #1. Assign peaks features to main S1 and main S2 in the event

    # Assign peaks features to main S1 and main S2 in the event
    for event_i, (event, sp) in enumerate(zip(events, split_peaks)):
        # Pre-change selection and sums (superseded by the block below):
        cond = (sp["type"]==2)&(sp["drift_time"]>0)&(sp["drift_time"]< 1.01 * self.drift_time_max)&(sp["cs2"]>0)
        result[f's2_sum'][event_i] = np.sum(sp[cond]['area'])
        result[f'cs2_sum'][event_i] = np.sum(sp[cond]['cs2'])
        result[f'cs2_wo_timecorr_sum'][event_i] = np.sum(sp[cond]['cs2_wo_timecorr'])
        result[f'cs2_wo_elifecorr_sum'][event_i] = np.sum(sp[cond]['cs2_wo_elifecorr'])
        result[f'cs2_area_fraction_sum'][event_i] = np.sum(sp[cond]['cs2_area_fraction_top'])
        result[f's1_sum'][event_i] = np.sum(sp[sp["type"]==1]['area'])
        # Post-change selection: physical S2s inside the (configurable)
        # drift-time window with a positive corrected area.
        cond = (sp['type'] == 2) & (sp['drift_time'] > 0)
        cond &= (sp['drift_time'] < self.ms_window_fac * self.drift_time_max) & (sp['cs2'] > 0)
        result['s2_sum'][event_i] = np.nansum(sp[cond]['area'])
        result['cs2_sum'][event_i] = np.nansum(sp[cond]['cs2'])
        result['cs2_wo_timecorr_sum'][event_i] = np.nansum(sp[cond]['cs2_wo_timecorr'])
        result['cs2_wo_elifecorr_sum'][event_i] = np.nansum(sp[cond]['cs2_wo_elifecorr'])
        result['s1_sum'][event_i] = np.nansum(sp[sp['type'] == 1]['area'])

        # cs2-weighted averages are only defined when at least one selected
        # S2 has a positive corrected area.
        if np.sum(sp[cond]['cs2']) > 0:
            # Pre-change lines (superseded by the post-change lines below):
            result[f'cs1_multi_wo_timecorr'][event_i] = event["s1_area"] * np.average(sp[cond]['s1_xyz_correction_factor'], weights = sp[cond]['cs2'])
            result[f'cs1_multi'][event_i] = result[f'cs1_multi_wo_timecorr'][event_i] * np.average(sp[cond]['s1_rel_light_yield_correction_factor'], weights = sp[cond]['cs2'])
            el = self.cs1_to_e(result[f'cs1_multi'])
            ec = self.cs2_to_e(result[f'cs2_sum'])
            result[f'ces_sum'] = el+ec
            result[f'e_charge_sum'] = ec
            # Post-change lines: cs2-weighted S1 corrections and average
            # positions of the S2s in this event.
            result['cs1_multi_wo_timecorr'][event_i] = event['s1_area'] * np.average(
                sp[cond]['s1_xyz_correction_factor'], weights=sp[cond]['cs2'])
            result['cs1_multi'][event_i] = result['cs1_multi_wo_timecorr'][event_i] * np.average(
                sp[cond]['s1_rel_light_yield_correction_factor'], weights=sp[cond]['cs2'])
            result['x_avg'][event_i] = np.average(sp[cond]['x'], weights=sp[cond]['cs2'])
            result['y_avg'][event_i] = np.average(sp[cond]['y'], weights=sp[cond]['cs2'])
            result['z_obs_avg'][event_i] = np.average(sp[cond]['z_obs_ms'], weights=sp[cond]['cs2'])
            result['cs2_area_fraction_top_avg'][event_i] = np.average(
                sp[cond]['cs2_area_fraction_top'], weights=sp[cond]['cs2'])
            result['multiplicity'][event_i] = len(sp[cond]['area'])

    # Energy estimates from the summed corrected areas (vectorized over events).
    el = self.cs1_to_e(result['cs1_multi'])
    ec = self.cs2_to_e(result['cs2_sum'])
    result['ces_sum'] = el + ec
    result['e_charge_sum'] = ec
    result['time'] = events['time']
    result['endtime'] = strax.endtime(events)
    return result
8 changes: 4 additions & 4 deletions straxen/plugins/peaks/__init__.py
Expand Up @@ -34,11 +34,11 @@
from . import peaks
from .peaks import *

from . import peaks_per_event
from .peaks_per_event import *
from . import peak_per_event
from .peak_per_event import *

from . import peaks_corrections
from .peaks_corrections import *
from . import peak_corrections
from .peak_corrections import *

from . import peaks_subtyping
from .peaks_subtyping import *
129 changes: 129 additions & 0 deletions straxen/plugins/peaks/peak_corrections.py
@@ -0,0 +1,129 @@
from straxen.plugins.events.corrected_areas import CorrectedAreas
import strax
import numpy as np
import straxen

export, __all__ = strax.exporter()


@export
class PeakCorrectedAreas(CorrectedAreas):
    """
    Plugin to apply corrections on peak level assuming that the main
    S1 is the only physical S1.

    Subclasses the event-level ``CorrectedAreas`` plugin and reuses the
    correction resources it provides (``s1_xyz_map``, ``s2_xy_map``,
    SEG/EE partitions, ``elife``) to produce corrected S2 areas for
    every peak, plus the S1 correction factors evaluated at each
    peak's position. Non-S2 peaks get NaN in every output field.
    """
    __version__ = '0.0.1'

    depends_on = ('peak_basics', 'peak_positions', 'peak_per_event')
    data_kind = 'peaks'
    provides = 'peak_corrections'

    # Vertical drift velocity, used to turn drift time into a z position.
    electron_drift_velocity = straxen.URLConfig(
        default='cmt://'
                'electron_drift_velocity'
                '?version=ONLINE&run_id=plugin.run_id',
        cache=True,
        help='Vertical electron drift velocity in cm/ns (1e4 m/ms)'
    )

    # Drift-time offset accumulated before the gate; subtracted from the
    # naive z so z=0 corresponds to the gate.
    electron_drift_time_gate = straxen.URLConfig(
        default='cmt://'
                'electron_drift_time_gate'
                '?version=ONLINE&run_id=plugin.run_id',
        help='Electron drift time from the gate in ns',
        cache=True)

    def infer_dtype(self):
        """Return the dtype of the per-peak corrected-area fields."""
        dtype = strax.time_fields + [
            (('Corrected area of S2 before elife correction '
              '(s2 xy correction + SEG/EE correction applied) [PE]',
              'cs2_wo_elifecorr'), np.float32),
            (('Corrected area of S2 before SEG/EE and elife corrections '
              '(s2 xy correction applied) [PE]',
              'cs2_wo_timecorr'), np.float32),
            (('Fraction of area seen by the top PMT array for corrected S2',
              'cs2_area_fraction_top'), np.float32),
            (('Corrected area of S2 in the bottom PMT array [PE]',
              'cs2_bottom'), np.float32),
            (('Corrected area of S2 [PE]', 'cs2'), np.float32),
            (('Correction factor for the S1 area based on S2 position',
              's1_xyz_correction_factor'), np.float32),
            (('Relative light yield correction factor for the S1 area',
              's1_rel_light_yield_correction_factor'), np.float32),
            (('z position of the multiscatter peak',
              'z_obs_ms'), np.float32),
        ]
        return dtype

    def compute(self, peaks):
        """Compute corrected S2 areas and S1 correction factors per peak.

        Returns one row per peak; every correction field is set to NaN
        for peaks whose type is not 2 (S2).
        """
        result = np.zeros(len(peaks), self.dtype)
        result['time'] = peaks['time']
        result['endtime'] = peaks['endtime']

        # Get z position of the peak from its drift time, correcting for
        # the gate drift-time offset.
        z_obs = -self.electron_drift_velocity * peaks['drift_time']
        z_obs = z_obs + self.electron_drift_velocity * self.electron_drift_time_gate
        result['z_obs_ms'] = z_obs

        # Get S1 correction factors: inverse of the S1 xyz map evaluated at
        # each peak's (x, y, z), and inverse of the relative light yield
        # (both inherited from CorrectedAreas).
        peak_positions = np.vstack([peaks['x'], peaks['y'], z_obs]).T
        result['s1_xyz_correction_factor'] = 1 / self.s1_xyz_map(peak_positions)
        result['s1_rel_light_yield_correction_factor'] = 1 / self.rel_light_yield

        # s2 corrections
        s2_top_map_name, s2_bottom_map_name = self.s2_map_names()

        seg, avg_seg, ee = self.seg_ee_correction_preparation()

        # now can start doing corrections

        # S2(x,y) corrections use the observed S2 positions
        s2_positions = np.vstack([peaks['x'], peaks['y']]).T

        # corrected s2 with s2 xy map only, i.e. no elife correction
        # this is for s2-only events which don't have drift time info
        cs2_top_xycorr = (peaks['area']
                          * peaks['area_fraction_top']
                          / self.s2_xy_map(s2_positions, map_name=s2_top_map_name))
        cs2_bottom_xycorr = (peaks['area']
                             * (1 - peaks['area_fraction_top'])
                             / self.s2_xy_map(s2_positions, map_name=s2_bottom_map_name))

        # For electron lifetime corrections to the S2s,
        # use drift time computed using the main S1.
        elife_correction = np.exp(peaks['drift_time'] / self.elife)
        result['cs2_wo_timecorr'] = ((cs2_top_xycorr + cs2_bottom_xycorr) * elife_correction)

        for partition, func in self.regions.items():
            # partitioned SE and EE
            partition_mask = func(peaks['x'], peaks['y'])

            # Correct for SEgain and extraction efficiency
            seg_ee_corr = seg[partition] / avg_seg[partition] * ee[partition]

            # note that these are already masked!
            cs2_top_wo_elifecorr = cs2_top_xycorr[partition_mask] / seg_ee_corr
            cs2_bottom_wo_elifecorr = cs2_bottom_xycorr[partition_mask] / seg_ee_corr

            result['cs2_wo_elifecorr'][partition_mask] = cs2_top_wo_elifecorr + cs2_bottom_wo_elifecorr

            # cs2aft doesn't need elife/time corrections as they cancel
            result['cs2_area_fraction_top'][partition_mask] = cs2_top_wo_elifecorr / (
                cs2_top_wo_elifecorr + cs2_bottom_wo_elifecorr)

            result['cs2'][partition_mask] = result['cs2_wo_elifecorr'][
                partition_mask] * elife_correction[partition_mask]
            result['cs2_bottom'][partition_mask] = cs2_bottom_wo_elifecorr * elife_correction[partition_mask]

        # Corrections are only meaningful for S2s; blank out everything else.
        not_s2_mask = peaks['type'] != 2
        result['cs2_wo_timecorr'][not_s2_mask] = np.nan
        result['cs2_wo_elifecorr'][not_s2_mask] = np.nan
        result['cs2_area_fraction_top'][not_s2_mask] = np.nan
        result['cs2'][not_s2_mask] = np.nan
        result['z_obs_ms'][not_s2_mask] = np.nan
        result['cs2_bottom'][not_s2_mask] = np.nan
        result['s1_xyz_correction_factor'][not_s2_mask] = np.nan
        result['s1_rel_light_yield_correction_factor'][not_s2_mask] = np.nan
        return result
Expand Up @@ -12,31 +12,29 @@ class EventPeaks(strax.Plugin):
Link - https://xe1t-wiki.lngs.infn.it/doku.php?id=weiss:analysis:ms_plugin
"""
__version__ = '0.0.1'
depends_on = ('event_basics', 'peak_basics','peak_positions')
provides = 'peaks_per_event'
depends_on = ('event_basics', 'peak_basics', 'peak_positions')
provides = 'peak_per_event'
data_kind = 'peaks'
save_when = strax.SaveWhen.TARGET

def infer_dtype(self):
    # NOTE(review): MERGED DIFF — both the old construction
    # ('dtype = [' ... ']' followed by 'dtype += strax.time_fields') and the
    # new one ('dtype = strax.time_fields + [' ... ']') are shown fused
    # together; as written this is not valid Python. Only one of the two
    # belongs in the committed file — confirm against the repository.
    dtype = [
    dtype = strax.time_fields + [
        ('drift_time', np.float32, 'Drift time between main S1 and S2 in ns'),
        ('event_number', np.int64, 'Event number in this dataset'),
    ]
    dtype += strax.time_fields
    ]
    return dtype

save_when = strax.SaveWhen.TARGET

def compute(self, events, peaks):
    # NOTE(review): MERGED DIFF — the pre-change f-string assignments are
    # immediately shadowed by the post-change plain-key assignments; the
    # original placement of the pre-change event_number/NaN lines (inside or
    # outside the loop) cannot be recovered from this view — confirm against
    # the committed file.
    split_peaks = strax.split_by_containment(peaks, events)
    split_peaks_ind = strax.fully_contained_in(peaks, events)
    result = np.zeros(len(peaks), self.dtype)
    result.fill(np.nan)
    #1. Assign peaks features to main S1 and main S2 in the event

    # Assign peaks features to main S1 and main S2 in the event
    for event_i, (event, sp) in enumerate(zip(events, split_peaks)):
        # Pre-change lines (superseded below):
        result[f'drift_time'][(split_peaks_ind==event_i)] = sp[f'center_time']-event[f's1_center_time']
        result[f'event_number']= split_peaks_ind
        result[f'drift_time'][peaks["type"]!=2]= np.nan
        # Post-change line: drift time of each contained peak relative to
        # its event's main S1 center time.
        result['drift_time'][split_peaks_ind==event_i] = sp['center_time'] - event['s1_center_time']
    result['event_number'] = split_peaks_ind
    # Drift time is only defined for S2 peaks.
    result['drift_time'][peaks['type'] != 2] = np.nan
    result['time'] = peaks['time']
    result['endtime'] = strax.endtime(peaks)
    return result

0 comments on commit 2d834da

Please sign in to comment.