Merge 59ad229 into b1a0884
dachengx committed May 2, 2023
2 parents b1a0884 + 59ad229 commit 6f62f87
Showing 3 changed files with 30 additions and 18 deletions.
25 changes: 13 additions & 12 deletions straxen/plugins/peaklets/peaklets.py
@@ -37,7 +37,7 @@ class Peaklets(strax.Plugin):
parallel = 'process'
compressor = 'zstd'

-     __version__ = '1.0.1'
+     __version__ = '1.0.2'

peaklet_gap_threshold = straxen.URLConfig(
default=700, infer_type=False,
@@ -229,8 +229,7 @@ def compute(self, records, start, end):
hitlets = hits
del hits

-     hitlet_time_shift = (hitlets['left'] - hitlets['left_integration']) * hitlets['dt']
-     hitlets['time'] = hitlets['time'] - hitlet_time_shift
+     hitlets['time'] -= (hitlets['left'] - hitlets['left_integration']) * hitlets['dt']
hitlets['length'] = (hitlets['right_integration'] - hitlets['left_integration'])
hitlets = strax.sort_by_time(hitlets)
rlinks = strax.record_links(records)
@@ -275,22 +274,24 @@ def compute(self, records, start, end):
# Compute the width again for corrected peaks
strax.compute_widths(peaklets, select_peaks_indices=peak_list)

+     hitlet_time_shift = (hitlets['left'] - hitlets['left_integration']) * hitlets['dt']
+     hit_max_times = hitlets['time'] + hitlet_time_shift  # add time shift again to get correct maximum
+     hit_max_times += hitlets['dt'] * hit_max_sample(records, hitlets)

# Compute tight coincidence level.
# Making this a separate plugin would
# (a) doing hitfinding yet again (or storing hits)
# (b) increase strax memory usage / max_messages,
# possibly due to its currently primitive scheduling.
-     hit_max_times = np.sort(
-         hitlets['time']
-         + hitlets['dt'] * hit_max_sample(records, hitlets)
-         + hitlet_time_shift  # add time shift again to get correct maximum
-     )
+     hit_max_times_argsort = np.argsort(hit_max_times)
+     sorted_hit_max_times = hit_max_times[hit_max_times_argsort]
+     sorted_hit_channels = hitlets['channel'][hit_max_times_argsort]
peaklet_max_times = (
peaklets['time']
+ np.argmax(peaklets['data'], axis=1) * peaklets['dt'])
tight_coincidence_channel = get_tight_coin(
-         hit_max_times,
-         hitlets['channel'],
+         sorted_hit_max_times,
+         sorted_hit_channels,
peaklet_max_times,
self.tight_coincidence_window_left,
self.tight_coincidence_window_right,
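The substance of the hunk above: the removed lines sorted the hit maximum times with np.sort while the channel array passed to get_tight_coin stayed in its original order, so each sorted time was paired with the wrong channel; the added lines use one argsort to reorder times and channels together. A minimal NumPy sketch of the pairing problem and the fix (toy values, not straxen data):

import numpy as np

# Toy hit data: the maximum-sample time of each hit and its channel.
hit_max_times = np.array([50, 10, 30])
hit_channels = np.array([7, 2, 5])

# Old behaviour: only the times are sorted, the channels keep their
# original order, so time 10 ends up paired with channel 7 instead of 2.
mismatched_pairs = list(zip(np.sort(hit_max_times), hit_channels))

# Fixed behaviour: a single argsort reorders both arrays consistently,
# so every sorted time keeps its own channel.
order = np.argsort(hit_max_times)
sorted_hit_max_times = hit_max_times[order]   # array([10, 30, 50])
sorted_hit_channels = hit_channels[order]     # array([2, 5, 7])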
12 changes: 12 additions & 0 deletions tests/plugins/peak_building.py
@@ -56,5 +56,17 @@ def test_saturation_correction(self: PluginTestCase):
# TODO add more tests to see if results make sense


+ @PluginTestAccumulator.register('test_tight_coincidence')
+ def test_tight_coincidence(self: PluginTestCase):
+     """Test whether tight_coincidence is correctly reconstructed"""
+     if str(self.st.key_for(self.run_id, 'raw_records')) != '012882-raw_records-z7q2d2ye2t':
+         print('skip checking because complexity')
+         return
+     peaklets = self.st.get_array(self.run_id, 'peaklets', progress_bar=False)
+     message = 'There might be some issue in tight_coincidence.'
+     sum_tight_coincidence = np.sum(peaklets['tight_coincidence'])
+     assert sum_tight_coincidence == 1992, message


if __name__ == '__main__':
run_pytest_from_main()
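For orientation, the new regression test above reduces the peaklets of one fixed run to a single number. A small sketch of that reduction, using only the get_array call that appears in the test; the context object and run id are supplied by the PluginTestCase fixture and are taken as given here:

import numpy as np

def summed_tight_coincidence(st, run_id):
    # Load the peaklets for this run and collapse the per-peaklet
    # tight_coincidence field into one scalar for regression checking.
    peaklets = st.get_array(run_id, 'peaklets', progress_bar=False)
    return int(np.sum(peaklets['tight_coincidence']))

# The test above then asserts this value equals 1992 for run 012882.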
11 changes: 5 additions & 6 deletions tests/test_peaklet_processing.py
@@ -51,7 +51,7 @@ def test_n_hits():
@given(fake_hits,
strat.lists(elements=strat.integers(0, 9), min_size=20))
@settings(deadline=None)
- def test_tight_coincidence(hits, channel):
+ def test_get_tight_coin(hits, channel):
hits['area'] = 1
hits['channel'] = channel[:len(hits)]  # In case there are fewer channels than hits (unlikely)
gap_threshold = 10
@@ -68,11 +68,10 @@ def test_tight_coincidence(hits, channel):
left = 5
right = 5
    tight_coin_channel = get_tight_coin(hits_max_time,
                                        hits['channel'],
                                        peaks_max_time,
                                        left,
-                                       right,
-                                       )
+                                       right)
for ind, p_max_t in enumerate(peaks_max_time):
m_hits_in_peak = (hits_max_time >= (p_max_t - left))
m_hits_in_peak &= (hits_max_time <= (p_max_t + right))
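The hypothesis test above cross-checks get_tight_coin against a brute-force window count: for each peak maximum time it selects the hits whose maximum falls inside [t - left, t + right]. A simplified pure-NumPy reference of that channel-level counting, assuming the argument order shown in the diff; straxen's actual get_tight_coin is a compiled (numba) implementation, so this sketch is only illustrative:

import numpy as np

def tight_coin_reference(sorted_hit_max_times, sorted_hit_channels,
                         peak_max_times, left, right):
    """For each peak, count the distinct channels whose hit maximum
    lies within [peak_max_time - left, peak_max_time + right]."""
    n_coin = np.zeros(len(peak_max_times), dtype=np.int64)
    for i, t in enumerate(peak_max_times):
        in_window = ((sorted_hit_max_times >= t - left)
                     & (sorted_hit_max_times <= t + right))
        n_coin[i] = len(np.unique(sorted_hit_channels[in_window]))
    return n_coin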
