-
-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathsourcemap_visualizer.rs
137 lines (120 loc) · 4.77 KB
/
sourcemap_visualizer.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
use std::borrow::Cow;
use crate::SourceMap;
/// The `SourcemapVisualizer` is a helper for sourcemap testing.
/// It prints the mapping of original content and final content tokens.
pub struct SourcemapVisualizer<'a> {
    /// Generated (final) text that the sourcemap maps positions into.
    output: &'a str,
    /// The sourcemap under inspection; expected to describe `output`.
    sourcemap: &'a SourceMap,
}
impl<'a> SourcemapVisualizer<'a> {
    /// Creates a visualizer for the generated `output` text and its `sourcemap`.
    pub fn new(output: &'a str, sourcemap: &'a SourceMap) -> Self {
        Self { output, sourcemap }
    }

    /// Renders the token mappings as human-readable text.
    ///
    /// Each mapped token prints as
    /// `(src_line:src_col) "original text" --> (dst_line:dst_col) "output text"`,
    /// grouped under a `- source_name` header whenever the source file changes.
    /// Tokens whose positions fall outside the known line tables are printed
    /// with an `[invalid]` marker instead of text. Returns
    /// `"[no source contents]\n"` when the sourcemap carries no embedded
    /// source contents.
    pub fn into_visualizer_text(self) -> String {
        let mut s = String::new();

        let Some(source_contents) = &self.sourcemap.source_contents else {
            s.push_str("[no source contents]\n");
            return s;
        };

        // Pre-split every source (and the output) into per-line UTF-16 code
        // unit tables: sourcemap columns count UTF-16 units, not bytes/chars.
        let source_contents_lines_map: Vec<Vec<Vec<u16>>> = source_contents
            .iter()
            .map(|content| Self::generate_line_utf16_tables(content))
            .collect();
        let output_lines = Self::generate_line_utf16_tables(self.output);

        let tokens = &self.sourcemap.tokens;

        let mut last_source: Option<&str> = None;
        for (i, t) in tokens.iter().enumerate() {
            let Some(source_id) = t.source_id else {
                continue;
            };
            let Some(source) = self.sourcemap.get_source(source_id) else { continue };
            let source_lines = &source_contents_lines_map[source_id as usize];

            // Print a header line whenever we move to a different source file.
            if last_source != Some(source) {
                s.push('-');
                s.push(' ');
                s.push_str(source);
                s.push('\n');
                last_source = Some(source);
            }

            // Validate both positions before slicing any line table.
            let dst_invalid = t.dst_line as usize >= output_lines.len()
                || (t.dst_col as usize) >= output_lines[t.dst_line as usize].len();
            let src_invalid = t.src_line as usize >= source_lines.len()
                || (t.src_col as usize) >= source_lines[t.src_line as usize].len();
            if dst_invalid || src_invalid {
                s.push_str(&format!(
                    "({}:{}){} --> ({}:{}){}\n",
                    t.src_line,
                    t.src_col,
                    if src_invalid { " [invalid]" } else { "" },
                    t.dst_line,
                    t.dst_col,
                    if dst_invalid { " [invalid]" } else { "" },
                ));
                continue;
            }

            // The token's output text runs to the next token on the same
            // output line, or to end of line.
            let dst_end_col = {
                match tokens.get(i + 1) {
                    Some(t2) if t2.dst_line == t.dst_line => t2.dst_col,
                    _ => output_lines[t.dst_line as usize].len() as u32,
                }
            };

            // The token's source text runs to the next forward-moving token
            // on the same source line of the same source, or to end of line.
            let src_end_col = 'result: {
                for t2 in &tokens[i + 1..] {
                    if t2.source_id == t.source_id && t2.src_line == t.src_line {
                        // skip duplicate or backward mappings
                        if t2.src_col <= t.src_col {
                            continue;
                        }
                        break 'result t2.src_col;
                    }
                    break;
                }
                source_lines[t.src_line as usize].len() as u32
            };

            s.push_str(&format!(
                "({}:{}) {:?} --> ({}:{}) {:?}\n",
                t.src_line,
                t.src_col,
                Self::str_slice_by_token(source_lines, t.src_line, t.src_col, src_end_col),
                t.dst_line,
                t.dst_col,
                Self::str_slice_by_token(&output_lines, t.dst_line, t.dst_col, dst_end_col)
            ));
        }

        s
    }

    /// Splits `content` into lines — each line keeps its terminator — and
    /// encodes every line as UTF-16 code units, since sourcemap columns are
    /// UTF-16 based.
    fn generate_line_utf16_tables(content: &str) -> Vec<Vec<u16>> {
        let mut tables = vec![];
        let mut line_byte_offset = 0;
        for (i, ch) in content.char_indices() {
            match ch {
                '\r' | '\n' | '\u{2028}' | '\u{2029}' => {
                    // Handle Windows-specific "\r\n" newlines: defer the line
                    // break to the following '\n'. `i` is a *byte* index from
                    // `char_indices`, so compare bytes directly — the previous
                    // `content.chars().nth(i + 1)` treated it as a char index
                    // (wrong after any multi-byte char) and rescanned from the
                    // start of the string each time (O(n^2)).
                    if ch == '\r' && content.as_bytes().get(i + 1) == Some(&b'\n') {
                        continue;
                    }
                    // End the line *after* the terminator, using its UTF-8
                    // width: '\u{2028}'/'\u{2029}' are 3 bytes, so a fixed
                    // `i + 1` would split a code point and panic on slicing.
                    let next_offset = i + ch.len_utf8();
                    tables.push(
                        content[line_byte_offset..next_offset].encode_utf16().collect::<Vec<_>>(),
                    );
                    line_byte_offset = next_offset;
                }
                _ => {}
            }
        }
        // Final line without a trailing terminator (possibly empty).
        tables.push(content[line_byte_offset..].encode_utf16().collect::<Vec<_>>());
        tables
    }

    /// Decodes the UTF-16 span of `buff[line]` between `start` and `end`
    /// (order-normalized and clamped to the line length) back into a string,
    /// stripping carriage returns so output is stable across platforms.
    fn str_slice_by_token(buff: &[Vec<u16>], line: u32, start: u32, end: u32) -> Cow<'_, str> {
        let line = line as usize;
        let start = start as usize;
        let end = end as usize;
        let s = &buff[line];
        String::from_utf16(&s[start.min(end).min(s.len())..start.max(end).min(s.len())])
            .unwrap()
            .replace("\r", "")
            .into()
    }
}