Skip to content

Commit 660050d

Browse files
irudoy authored and SomethingNew71 committed
fix(mlg): correct timestamp unit and viewport-aware chart detail
Two MLG-related fixes bundled together: 1. Parser: MLG raw u16 timestamps are 10 µs/tick per the EFI Analytics MLG Binary Log Format spec, not 1 ms/tick. The previous formula compounded a 10× error in both the raw remainder and the wrap count for a net 100× over-estimate of wall-clock time (a ~30 min log showed as ~50 hours). Fixed via named constants MLG_TICK_SECONDS = 1e-5 and MLG_WRAP_TICKS = 65536, plus a regression test covering rusefi/speeduino sample logs. 2. UI: chart downsampling now slices raw data to the visible viewport before LTTB so detail scales with zoom. Previously LTTB compressed the full log to MAX_CHART_POINTS regardless of zoom, leaving fine detail invisible. Viewport bounds are remembered per plot area between frames; Y normalization uses the channel's global min/max so heights stay stable across pans.
1 parent 5cdf8a4 commit 660050d

4 files changed

Lines changed: 206 additions & 72 deletions

File tree

src/app.rs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,11 @@ pub struct UltraLogApp {
5555
pub(crate) downsample_cache: HashMap<CacheKey, Vec<[f64; 2]>>,
5656
/// Cache for channel min/max values (avoids O(n) scans)
5757
pub(crate) minmax_cache: HashMap<CacheKey, (f64, f64)>,
58+
/// Last X-axis bounds shown by each plot area. Used to slice raw data to
59+
/// the visible viewport before LTTB-downsampling, so chart detail scales
60+
/// with zoom level instead of being fixed at MAX_CHART_POINTS over the
61+
/// full log range. Keyed by plot_area_id (0 in single-plot mode).
62+
pub(crate) chart_last_x_bounds: HashMap<usize, (f64, f64)>,
5863
/// Current cursor position in seconds (timeline feature)
5964
pub(crate) cursor_time: Option<f64>,
6065
/// Total time range across all loaded files (min, max)
@@ -175,6 +180,7 @@ impl Default for UltraLogApp {
175180
loading_state: LoadingState::Idle,
176181
downsample_cache: HashMap::new(),
177182
minmax_cache: HashMap::new(),
183+
chart_last_x_bounds: HashMap::new(),
178184
cursor_time: None,
179185
time_range: None,
180186
cursor_record: None,
@@ -919,6 +925,10 @@ impl UltraLogApp {
919925
}
920926
self.minmax_cache = new_minmax_cache;
921927

928+
// Reset viewport-bounds memory so the next frame after a file is
929+
// removed picks fresh bounds from whatever data remains.
930+
self.chart_last_x_bounds.clear();
931+
922932
// Clear computed channels for this file and update indices
923933
self.file_computed_channels.remove(&index);
924934
let mut new_computed_channels = HashMap::new();

src/parsers/speeduino.rs

Lines changed: 56 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -285,11 +285,16 @@ impl Speeduino {
285285
let mut times: Vec<f64> = Vec::with_capacity(estimated_records);
286286
let mut data_records: Vec<Vec<Value>> = Vec::with_capacity(estimated_records);
287287

288-
// Track timestamp wraparound (u16 wraps at 65535ms = 65.535 seconds)
288+
// Track u16 timestamp wraparound. Per the EFI Analytics MLG Binary
289+
// Log Format spec, each tick is 10 µs, so the u16 wraps every
290+
// 65536 × 10 µs = 0.65536 s of wall-clock time.
291+
const MLG_TICK_SECONDS: f64 = 1e-5;
292+
const MLG_WRAP_TICKS: f64 = 65_536.0;
289293
let mut prev_raw_timestamp: u16 = 0;
290294
let mut wrap_count: u64 = 0;
291-
// If timestamp drops by more than 30 seconds, it definitely wrapped
292-
// (actual wraparounds show ~58.7s drop when going from ~65s to ~6s)
295+
// A drop > 30000 raw ticks (= 0.3 s) is far above the per-record
296+
// increment at any realistic ECU sample rate (20–1000 Hz), so it
297+
// reliably distinguishes a real u16 wraparound from sample jitter.
293298
const WRAP_THRESHOLD: u16 = 30000;
294299

295300
while offset + 4 <= data.len() {
@@ -316,7 +321,8 @@ impl Speeduino {
316321
prev_raw_timestamp = raw_timestamp;
317322

318323
// Calculate actual timestamp with wraparound compensation
319-
let timestamp = (raw_timestamp as f64 / 1000.0) + (wrap_count as f64 * 65.536);
324+
let timestamp =
325+
(raw_timestamp as f64 + wrap_count as f64 * MLG_WRAP_TICKS) * MLG_TICK_SECONDS;
320326

321327
if block_type == 0 {
322328
// Data record - calculate required bytes for all channels
@@ -819,4 +825,50 @@ mod tests {
819825
eprintln!("Parsed {} channels from rusEFI log", log.channels.len());
820826
eprintln!("Parsed {} data records", log.data.len());
821827
}
828+
829+
#[test]
830+
fn test_mlg_timestamp_scale() {
831+
// Regression guard for the 10 µs/bit timestamp unit. A previous
832+
// version of the parser treated the u16 tick as milliseconds, which
833+
// multiplied wall-clock time by ~100×. The bounds below are wide
834+
// enough to allow any realistic ECU sample rate (1 ms .. 1 s per
835+
// record) and tight enough to fail loudly on a ×10 or ×100 drift.
836+
for file_path in [
837+
"exampleLogs/rusefi/rusefilog.mlg",
838+
"exampleLogs/rusefi/Log1.mlg",
839+
"exampleLogs/speeduino/speeduino.mlg",
840+
] {
841+
let data = match std::fs::read(file_path) {
842+
Ok(d) => d,
843+
Err(_) => {
844+
eprintln!("Skipping {}: file not found", file_path);
845+
continue;
846+
}
847+
};
848+
849+
let log = Speeduino::parse_binary(&data)
850+
.unwrap_or_else(|e| panic!("parse {}: {}", file_path, e));
851+
assert!(
852+
log.times.len() > 1,
853+
"{}: expected multiple records",
854+
file_path
855+
);
856+
857+
let total = *log.times.last().unwrap() - log.times[0];
858+
let avg_dt = total / (log.times.len() - 1) as f64;
859+
assert!(
860+
(1e-3..=1.0).contains(&avg_dt),
861+
"{}: average sample interval {:.6}s outside 1ms..1s — units regression?",
862+
file_path,
863+
avg_dt
864+
);
865+
eprintln!(
866+
"{}: {} records, total {:.3}s, avg dt {:.4}s",
867+
file_path,
868+
log.times.len(),
869+
total,
870+
avg_dt
871+
);
872+
}
873+
}
822874
}

src/ui/chart.rs

Lines changed: 91 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ use rust_i18n::t;
77
use crate::app::UltraLogApp;
88
use crate::normalize::normalize_channel_name_with_custom;
99
use crate::state::{
10-
CacheKey, PlotArea, SelectedChannel, CHART_COLORS, COLORBLIND_COLORS, MAX_CHART_POINTS,
11-
MIN_PLOT_HEIGHT, PLOT_RESIZE_HANDLE_HEIGHT,
10+
PlotArea, SelectedChannel, CHART_COLORS, COLORBLIND_COLORS, MAX_CHART_POINTS, MIN_PLOT_HEIGHT,
11+
PLOT_RESIZE_HANDLE_HEIGHT,
1212
};
1313

1414
/// Sensitivity multiplier for scroll-to-zoom (higher = faster zoom per scroll tick).
@@ -76,32 +76,16 @@ impl UltraLogApp {
7676
return;
7777
}
7878

79-
// Pre-compute and cache downsampled + normalized data for all selected channels
80-
for selected in &selected_channels {
81-
if selected.file_index >= self.files.len() {
82-
continue;
83-
}
84-
85-
let cache_key = CacheKey {
86-
file_index: selected.file_index,
87-
channel_index: selected.channel_index,
88-
plot_area_id: 0, // Single-plot mode uses plot_area_id 0
89-
};
90-
91-
if !self.downsample_cache.contains_key(&cache_key) {
92-
let file = &self.files[selected.file_index];
93-
let times = file.log.get_times_as_f64();
94-
// Use app method to get channel data (handles both regular and computed channels)
95-
let data = self.get_channel_data(selected.file_index, selected.channel_index);
96-
97-
if times.len() == data.len() && !times.is_empty() {
98-
let downsampled = Self::downsample_lttb(times, &data, MAX_CHART_POINTS);
99-
// Normalize Y values to 0-1 range so all channels overlay
100-
let normalized = Self::normalize_points(&downsampled);
101-
self.downsample_cache.insert(cache_key, normalized);
102-
}
103-
}
104-
}
79+
// Compute downsampled + normalized data sliced to the current viewport.
80+
// Detail scales with zoom level: a 1% viewport gets MAX_CHART_POINTS
81+
// over that 1%, not over the whole log.
82+
let viewport = self.chart_last_x_bounds.get(&0).copied();
83+
let chart_points: Vec<Option<Vec<[f64; 2]>>> = selected_channels
84+
.iter()
85+
.map(|selected| {
86+
self.compute_viewport_points(selected.file_index, selected.channel_index, viewport)
87+
})
88+
.collect();
10589

10690
// Pre-compute legend names with current values at cursor position
10791
let use_normalization = self.field_normalization;
@@ -139,7 +123,7 @@ impl UltraLogApp {
139123
.collect();
140124

141125
// Prepare data for the plot closure (can't borrow self mutably inside)
142-
let cache = &self.downsample_cache;
126+
let chart_points = &chart_points;
143127
let files = &self.files;
144128
// selected_channels already defined at top of function from get_selected_channels()
145129
let cursor_time = self.get_cursor_time();
@@ -275,13 +259,7 @@ impl UltraLogApp {
275259
continue;
276260
}
277261

278-
let cache_key = CacheKey {
279-
file_index: selected.file_index,
280-
channel_index: selected.channel_index,
281-
plot_area_id: 0, // Single-plot mode uses plot_area_id 0
282-
};
283-
284-
if let Some(points) = cache.get(&cache_key) {
262+
if let Some(points) = chart_points.get(i).and_then(|p| p.as_ref()) {
285263
let plot_points: PlotPoints = points.iter().copied().collect();
286264
let palette = if color_blind_mode {
287265
COLORBLIND_COLORS
@@ -314,6 +292,12 @@ impl UltraLogApp {
314292
plot_ui.pointer_coordinate()
315293
});
316294

295+
// Remember the X-axis bounds we just rendered so the next frame can
296+
// slice raw data to this viewport before LTTB-downsampling.
297+
let final_bounds = response.transform.bounds();
298+
self.chart_last_x_bounds
299+
.insert(0, (final_bounds.min()[0], final_bounds.max()[0]));
300+
317301
// Detect user interaction with chart (drag, zoom, scroll)
318302
// This marks the chart as "interacted" so we stop using the initial zoomed view
319303
if response.response.dragged()
@@ -506,30 +490,14 @@ impl UltraLogApp {
506490
plot_area_id: usize,
507491
height: f32,
508492
) {
509-
// Pre-compute and cache data for these channels
510-
for selected in channels {
511-
if selected.file_index >= self.files.len() {
512-
continue;
513-
}
514-
515-
let cache_key = CacheKey {
516-
file_index: selected.file_index,
517-
channel_index: selected.channel_index,
518-
plot_area_id,
519-
};
520-
521-
if !self.downsample_cache.contains_key(&cache_key) {
522-
let file = &self.files[selected.file_index];
523-
let times = file.log.get_times_as_f64();
524-
let data = self.get_channel_data(selected.file_index, selected.channel_index);
525-
526-
if times.len() == data.len() && !times.is_empty() {
527-
let downsampled = Self::downsample_lttb(times, &data, MAX_CHART_POINTS);
528-
let normalized = Self::normalize_points(&downsampled);
529-
self.downsample_cache.insert(cache_key, normalized);
530-
}
531-
}
532-
}
493+
// Compute viewport-aware downsampled + normalized points for this plot area.
494+
let viewport = self.chart_last_x_bounds.get(&plot_area_id).copied();
495+
let chart_points: Vec<Option<Vec<[f64; 2]>>> = channels
496+
.iter()
497+
.map(|selected| {
498+
self.compute_viewport_points(selected.file_index, selected.channel_index, viewport)
499+
})
500+
.collect();
533501

534502
// Build legend names with values
535503
let use_normalization = self.field_normalization;
@@ -567,7 +535,7 @@ impl UltraLogApp {
567535
.collect();
568536

569537
// Prepare data for plot
570-
let cache = &self.downsample_cache;
538+
let chart_points = &chart_points;
571539
let files = &self.files;
572540
let cursor_time = self.get_cursor_time();
573541
let cursor_tracking = self.cursor_tracking;
@@ -652,13 +620,7 @@ impl UltraLogApp {
652620
continue;
653621
}
654622

655-
let cache_key = CacheKey {
656-
file_index: selected.file_index,
657-
channel_index: selected.channel_index,
658-
plot_area_id,
659-
};
660-
661-
if let Some(points) = cache.get(&cache_key) {
623+
if let Some(points) = chart_points.get(i).and_then(|p| p.as_ref()) {
662624
let plot_points: PlotPoints = points.iter().copied().collect();
663625
let palette = if color_blind_mode {
664626
COLORBLIND_COLORS
@@ -688,6 +650,12 @@ impl UltraLogApp {
688650
plot_ui.pointer_coordinate()
689651
});
690652

653+
// Save the bounds we just rendered so the next frame's downsample
654+
// matches the visible viewport.
655+
let final_bounds = response.transform.bounds();
656+
self.chart_last_x_bounds
657+
.insert(plot_area_id, (final_bounds.min()[0], final_bounds.max()[0]));
658+
691659
// Detect interaction
692660
if response.response.dragged()
693661
|| response.response.drag_started()
@@ -872,6 +840,61 @@ impl UltraLogApp {
872840
}
873841
}
874842

843+
/// Compute the points to plot for one channel, sliced to the currently
844+
/// visible viewport before LTTB-downsampling. Y is normalized to [0, 1]
845+
/// against the channel's full-range min/max so heights stay stable when
846+
/// the user pans or zooms. `viewport` is the previous frame's X bounds;
847+
/// when `None` (e.g., first frame after load) the full data range is used.
848+
fn compute_viewport_points(
849+
&mut self,
850+
file_index: usize,
851+
channel_index: usize,
852+
viewport: Option<(f64, f64)>,
853+
) -> Option<Vec<[f64; 2]>> {
854+
let file = self.files.get(file_index)?;
855+
let times = file.log.get_times_as_f64();
856+
let data = self.get_channel_data(file_index, channel_index);
857+
if times.is_empty() || times.len() != data.len() {
858+
return None;
859+
}
860+
861+
let (lo, hi) = match viewport {
862+
Some((vmin, vmax)) if vmax > vmin => {
863+
let pad = (vmax - vmin) * 0.1;
864+
let lo_t = vmin - pad;
865+
let hi_t = vmax + pad;
866+
let lo_i = times.partition_point(|&t| t < lo_t).saturating_sub(1);
867+
let hi_i = times
868+
.partition_point(|&t| t <= hi_t)
869+
.saturating_add(1)
870+
.min(times.len());
871+
(lo_i, hi_i.max(lo_i + 1))
872+
}
873+
_ => (0, times.len()),
874+
};
875+
876+
let times_slice = &times[lo..hi];
877+
let data_slice = &data[lo..hi];
878+
let downsampled = Self::downsample_lttb(times_slice, data_slice, MAX_CHART_POINTS);
879+
880+
let (min_y, max_y) = self
881+
.get_channel_min_max(file_index, channel_index)
882+
.unwrap_or((0.0, 1.0));
883+
let range = (max_y - min_y).abs();
884+
// Constant channels (range ≈ 0) get parked at the middle of the
885+
// overlay strip so they remain visible instead of pinning to the
886+
// bottom edge — matches the prior `normalize_points` behavior.
887+
if range < f64::EPSILON {
888+
return Some(downsampled.into_iter().map(|p| [p[0], 0.5]).collect());
889+
}
890+
Some(
891+
downsampled
892+
.into_iter()
893+
.map(|p| [p[0], (p[1] - min_y) / range])
894+
.collect(),
895+
)
896+
}
897+
875898
/// Normalize values to 0-1 range for overlay display
876899
pub fn normalize_points(points: &[[f64; 2]]) -> Vec<[f64; 2]> {
877900
if points.is_empty() {

tests/parsers/speeduino_tests.rs

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -290,6 +290,55 @@ fn test_speeduino_timestamp_monotonicity() {
290290
assert_monotonic_times(&log);
291291
}
292292

293+
/// Regression guard for the MLG raw u16 timestamp unit (10 µs/tick per the
294+
/// EFI Analytics MLG spec). A previous version of the parser used 1 ms/tick,
295+
/// which compounded with the wraparound logic to inflate wall-clock time
296+
/// ~100×. We bound the total parsed duration to a value that all bundled
297+
/// sample logs comfortably satisfy and that any 10× or 100× drift would
298+
/// blow past.
299+
fn assert_mlg_total_duration_under(file_path: &str, max_seconds: f64) {
300+
if !example_file_exists(file_path) {
301+
eprintln!("Skipping {}: file not found", file_path);
302+
return;
303+
}
304+
305+
let data = read_example_binary(file_path);
306+
let log = Speeduino::parse_binary(&data)
307+
.unwrap_or_else(|e| panic!("Failed to parse {}: {}", file_path, e));
308+
309+
assert!(
310+
log.times.len() > 1,
311+
"{}: expected multiple records",
312+
file_path
313+
);
314+
315+
let total = *log.times.last().unwrap() - log.times[0];
316+
assert!(
317+
total > 0.0 && total < max_seconds,
318+
"{}: total duration {:.3}s outside (0, {})s — timestamp unit regression?",
319+
file_path,
320+
total,
321+
max_seconds
322+
);
323+
}
324+
325+
#[test]
326+
fn test_speeduino_mlg_duration_bounded() {
327+
// ~30 minutes is well above any of the bundled speeduino samples but
328+
// far below the ~50 hour figure the old 1ms-tick interpretation produced.
329+
assert_mlg_total_duration_under(SPEEDUINO_MLG, 30.0 * 60.0);
330+
}
331+
332+
#[test]
333+
fn test_rusefi_mlg_duration_bounded() {
334+
assert_mlg_total_duration_under(RUSEFI_MLG, 30.0 * 60.0);
335+
}
336+
337+
#[test]
338+
fn test_rusefi_log1_duration_bounded() {
339+
assert_mlg_total_duration_under(RUSEFI_LOG1, 30.0 * 60.0);
340+
}
341+
293342
#[test]
294343
fn test_speeduino_timestamp_range() {
295344
if !example_file_exists(SPEEDUINO_MLG) {

0 commit comments

Comments (0)