Skip to content

Commit f5bf3cc

Browse files
fix: Allow floods to merge same event multiple times
1 parent db720bf commit f5bf3cc

File tree

1 file changed

+62
-7
lines changed

1 file changed

+62
-7
lines changed

aw-transform/src/flood.rs

Lines changed: 62 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,15 @@ pub fn flood(events: Vec<Event>, pulsetime: chrono::Duration) -> Vec<Event> {
88
let mut events_sorted = sort_by_timestamp(events);
99
let mut e1_iter = events_sorted.drain(..).peekable();
1010
let mut new_events = Vec::new();
11-
let mut drop_next = false;
1211
let mut gap_prev: Option<chrono::Duration> = None;
13-
while let Some(mut e1) = e1_iter.next() {
14-
if drop_next {
15-
drop_next = false;
16-
continue;
12+
let mut retry_e: Option<Event> = None;
13+
while let Some(mut e1) = match retry_e {
14+
Some(e) => {
15+
retry_e = None;
16+
Some(e)
1717
}
18+
None => e1_iter.next(),
19+
} {
1820
if let Some(gap) = gap_prev {
1921
e1.timestamp = e1.timestamp - (gap / 2);
2022
e1.duration = e1.duration + (gap / 2);
@@ -37,7 +39,6 @@ pub fn flood(events: Vec<Event>, pulsetime: chrono::Duration) -> Vec<Event> {
3739
warned_negative_gap_safe = true;
3840
}
3941
// Choose the longest event and set the endtime to it
40-
// TODO: Also possibly extend to an e3 if that exists?
4142
let e1_endtime = e1.calculate_endtime();
4243
let e2_endtime = e2.calculate_endtime();
4344
if e2_endtime > e1_endtime {
@@ -46,7 +47,13 @@ pub fn flood(events: Vec<Event>, pulsetime: chrono::Duration) -> Vec<Event> {
4647
e1.duration = e1_endtime - e1.timestamp;
4748
}
4849
// Drop the next event since it has been merged and flooded into e1
49-
drop_next = true;
50+
e1_iter.next();
51+
// Retry this event to give it a chance to merge e1
52+
// with 'e3'
53+
retry_e = Some(e1);
54+
// Since we are retrying on this event we don't want to push it
55+
// to the new_events vec
56+
continue;
5057
} else {
5158
if chrono::Duration::seconds(0) > gap {
5259
if !warned_negative_gap_unsafe {
@@ -135,6 +142,10 @@ mod tests {
135142

136143
#[test]
137144
fn test_flood_same_timestamp() {
145+
// e1, stay same
146+
// e2, base merge (longest duration, this should be the duration selected)
147+
// e3, merge with e2
148+
// e4, stay same
138149
let e1 = Event {
139150
id: None,
140151
timestamp: DateTime::from_str("2000-01-01T00:00:00Z").unwrap(),
@@ -167,5 +178,49 @@ mod tests {
167178
assert_eq!(&res[0], &e1);
168179
assert_eq!(&res[1], &e2);
169180
assert_eq!(&res[2], &e4);
181+
182+
// e1, stay same
183+
// e2, base merge
184+
// e3, merge with e2
185+
// e4, merge with e2 (longest duration, this should be the duration selected)
186+
// e5, stay same
187+
let e1 = Event {
188+
id: None,
189+
timestamp: DateTime::from_str("2000-01-01T00:00:00Z").unwrap(),
190+
duration: Duration::seconds(1),
191+
data: json_map! {"status": "afk"},
192+
};
193+
let e2 = Event {
194+
id: None,
195+
timestamp: DateTime::from_str("2000-01-01T00:00:01Z").unwrap(),
196+
duration: Duration::seconds(5),
197+
data: json_map! {"status": "not-afk"},
198+
};
199+
let e3 = Event {
200+
id: None,
201+
timestamp: DateTime::from_str("2000-01-01T00:00:01Z").unwrap(),
202+
duration: Duration::seconds(1),
203+
data: json_map! {"status": "not-afk"},
204+
};
205+
let e4 = Event {
206+
id: None,
207+
timestamp: DateTime::from_str("2000-01-01T00:00:01Z").unwrap(),
208+
duration: Duration::seconds(10),
209+
data: json_map! {"status": "not-afk"},
210+
};
211+
let e5 = Event {
212+
id: None,
213+
timestamp: DateTime::from_str("2000-01-01T00:00:11Z").unwrap(),
214+
duration: Duration::seconds(1),
215+
data: json_map! {"status": "afk"},
216+
};
217+
let res = flood(
218+
vec![e1.clone(), e2.clone(), e3.clone(), e4.clone(), e5.clone()],
219+
Duration::seconds(5),
220+
);
221+
assert_eq!(3, res.len());
222+
assert_eq!(&res[0], &e1);
223+
assert_eq!(&res[1], &e4);
224+
assert_eq!(&res[2], &e5);
170225
}
171226
}

0 commit comments

Comments
 (0)