ra_syntax: reshape SyntaxError for the sake of removing redundancy #3026
Changes from all commits: 9fdf984, c582766, 6ae4850, acdab6f, e00922d, b510e77, e05eb63, cd8e56c, fc5e7b8, 053ccf4
```diff
@@ -27,8 +27,8 @@ pub(crate) fn incremental_reparse(
     edit: &AtomTextEdit,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, old_range)) = reparse_token(node, &edit) {
-        return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range));
+    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }

     if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
@@ -40,7 +40,7 @@ pub(crate) fn incremental_reparse(
 fn reparse_token<'node>(
     root: &'node SyntaxNode,
     edit: &AtomTextEdit,
-) -> Option<(GreenNode, TextRange)> {
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
     let prev_token_kind = prev_token.kind();
     match prev_token_kind {
@@ -54,7 +54,7 @@ fn reparse_token<'node>(
             }

             let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
-            let (new_token_kind, _error) = lex_single_syntax_kind(&new_text)?;
+            let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;

             if new_token_kind != prev_token_kind
                 || (new_token_kind == IDENT && is_contextual_kw(&new_text))
@@ -76,7 +76,11 @@ fn reparse_token<'node>(

             let new_token =
                 GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
-            Some((prev_token.replace_with(new_token), prev_token.text_range()))
+            Some((
+                prev_token.replace_with(new_token),
+                new_err.into_iter().collect(),
+                prev_token.text_range(),
+            ))
         }
         _ => None,
     }
@@ -87,7 +91,7 @@ fn reparse_block<'node>(
     edit: &AtomTextEdit,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
-    let text = get_text_after_edit(node.clone().into(), &edit);
+    let text = get_text_after_edit(node.clone().into(), edit);

     let (tokens, new_lexer_errors) = tokenize(&text);
     if !is_balanced(&tokens) {
@@ -162,20 +166,27 @@ fn is_balanced(tokens: &[Token]) -> bool {
 fn merge_errors(
     old_errors: Vec<SyntaxError>,
     new_errors: Vec<SyntaxError>,
-    old_range: TextRange,
+    range_before_reparse: TextRange,
     edit: &AtomTextEdit,
 ) -> Vec<SyntaxError> {
     let mut res = Vec::new();
-    for e in old_errors {
-        if e.offset() <= old_range.start() {
-            res.push(e)
-        } else if e.offset() >= old_range.end() {
-            res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len()));
+
+    for old_err in old_errors {
+        let old_err_range = old_err.range();
+        // FIXME: make sure that .start() was here previously by a mistake
```
Review thread on the FIXME above:

- "This FIXME will be removed before merging this PR."
- "It should be TODO then."
- "Yes, I just temporarily marked it FIXME to let it pass."
```diff
+        if old_err_range.end() <= range_before_reparse.start() {
+            res.push(old_err);
+        } else if old_err_range.start() >= range_before_reparse.end() {
+            let inserted_len = TextUnit::of_str(&edit.insert);
+            res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
+            // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
         }
     }
-    for e in new_errors {
-        res.push(e.add_offset(old_range.start(), 0.into()));
-    }
+    res.extend(new_errors.into_iter().map(|new_err| {
+        // fighting borrow checker with a variable ;)
+        let offseted_range = new_err.range() + range_before_reparse.start();
+        new_err.with_range(offseted_range)
```
Review comment on the `res.extend` closure: "I am surprised how this reports a move after borrow error":

```rust
new_err.with_range(*new_err.range() + range_before_reparse.start())
// where
fn with_range(mut self, range: TextRange) -> Self;
fn range(&self) -> &TextRange;
```
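To make that gotcha reproducible outside rust-analyzer, here is a minimal sketch with a hypothetical `FakeError` type standing in for the real `SyntaxError`/`TextRange` API (only the `self`-by-value vs. `&self` shapes of `with_range`/`range` are kept). The by-value receiver of `with_range` and the borrow of the same value inside the argument expression cannot coexist, which is why the one-liner is rejected and the diff binds `offseted_range` first.

```rust
// Sketch only: `FakeError` is a hypothetical stand-in, not the real SyntaxError/TextRange API.
#[derive(Debug)]
struct FakeError {
    range: (u32, u32),
}

impl FakeError {
    fn range(&self) -> &(u32, u32) {
        &self.range
    }
    fn with_range(mut self, range: (u32, u32)) -> Self {
        self.range = range;
        self
    }
}

fn main() {
    let e = FakeError { range: (0, 4) };

    // Rejected by the borrow checker: `with_range` consumes `e` while the
    // argument expression still needs to borrow `e` through `range(&self)`.
    // let shifted = e.with_range((e.range().0 + 10, e.range().1 + 10));

    // Accepted: evaluate the borrow into a local first, then hand `e` over.
    // This mirrors the `offseted_range` variable in the diff above.
    let new_range = (e.range().0 + 10, e.range().1 + 10);
    let shifted = e.with_range(new_range);
    println!("{:?}", shifted);
}
```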
```diff
+    }));
     res
 }
```
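One hedged reading of the "extra parens are intentional to prevent uint underflow, HWAB" note in the `merge_errors` hunk above, using plain `u32` offsets rather than the real `TextUnit`/`TextRange` arithmetic: text positions are unsigned, so computing the net delta `insert - delete` first can underflow when the edit shrinks the text, whereas shifting right by the inserted length and only then subtracting the deleted length stays in range, because this branch only handles errors that start after the deleted region.

```rust
fn main() {
    // An old error located after the edited range, and an edit that deletes
    // more text than it inserts (the text shrinks by 4 bytes).
    let old_err_start: u32 = 40;
    let inserted_len: u32 = 1;
    let deleted_len: u32 = 5;

    // Grouping used in the diff: shift right by the insertion first, then left
    // by the deletion. The intermediate value (41) never drops below
    // `deleted_len`, since the error starts at or after the deleted region.
    let shifted = (old_err_start + inserted_len) - deleted_len;
    assert_eq!(shifted, 36);

    // The "net delta" grouping underflows for unsigned units:
    // let delta = inserted_len - deleted_len; // 1 - 5: panics in debug builds
    // let shifted = old_err_start + delta;
}
```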
```diff
@@ -193,9 +204,9 @@ mod tests {

         let fully_reparsed = SourceFile::parse(&after);
         let incrementally_reparsed: Parse<SourceFile> = {
-            let f = SourceFile::parse(&before);
+            let before = SourceFile::parse(&before);
             let (green, new_errors, range) =
-                incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
+                incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
             Parse::new(green, new_errors)
         };
@@ -204,6 +215,7 @@
             &format!("{:#?}", fully_reparsed.tree().syntax()),
             &format!("{:#?}", incrementally_reparsed.tree().syntax()),
         );
+        assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
     }

     #[test] // FIXME: some test here actually test token reparsing
```
```diff
@@ -402,4 +414,42 @@ enum Foo {
             4,
         );
     }
+
+    #[test]
+    fn reparse_str_token_with_error_unchanged() {
+        do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24);
+    }
+
+    #[test]
+    fn reparse_str_token_with_error_fixed() {
+        do_check(r#""unterinated<|><|>"#, "\"", 12);
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_unchanged() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            "23",
+            105,
+        )
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_fixed() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            ";",
+            105,
+        )
+    }
 }
```
Review comment on the error-filtering condition in `merge_errors`: "Attention! I suppose we had a mistake in this `if` statement: we need to check that `e.range().end() <= old_range.start()`, and I corrected it in this PR (tests pass anyway...)."
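To make the corrected boundary check concrete, here is a small sketch with `(start, end)` tuples standing in for `TextRange` (the helper names are made up): an old error that merely starts before the reparsed range but spills into it was kept by the pre-PR condition and is dropped by the corrected one, so the freshly reparsed region reports its own, up-to-date errors.

```rust
// Hypothetical helpers; ranges are plain (start, end) tuples, not the real TextRange.

// Pre-PR condition: keep an old error if it merely *starts* before the
// reparsed range (what `e.offset() <= old_range.start()` used to check).
fn keep_old_check(err: (u32, u32), reparsed: (u32, u32)) -> bool {
    err.0 <= reparsed.0
}

// Corrected condition: keep it only if it *ends* before the reparsed range
// (`old_err_range.end() <= range_before_reparse.start()`).
fn keep_new_check(err: (u32, u32), reparsed: (u32, u32)) -> bool {
    err.1 <= reparsed.0
}

fn main() {
    let reparsed = (10, 30); // range that was just reparsed
    let overlapping_err = (5, 15); // starts before it, but reaches into it

    assert!(keep_old_check(overlapping_err, reparsed)); // stale error would survive
    assert!(!keep_new_check(overlapping_err, reparsed)); // dropped; the reparse supplies fresh errors
}
```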