Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

42 changes: 22 additions & 20 deletions crates/hir_expand/src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@
use std::sync::Arc;

use base_db::{salsa, SourceDatabase};
use itertools::Itertools;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
use rustc_hash::FxHashSet;
use syntax::{
algo::diff,
ast::{self, AttrsOwner, NameOwner},
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange, T,
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
};

use crate::{
Expand Down Expand Up @@ -151,7 +151,7 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, &speculative_args);
let (mut tt, spec_args_tmap) =
mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);

let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
Expand Down Expand Up @@ -305,7 +305,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,

let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);

if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
Expand All @@ -315,24 +315,26 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
Some(Arc::new((tt, tmap)))
}

fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
match loc.kind {
MacroCallKind::FnLike { .. } => None,
MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => item
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
(|| {
let censor = match loc.kind {
MacroCallKind::FnLike { .. } => return None,
MacroCallKind::Derive { derive_attr_index, .. } => ast::Item::cast(node.clone())?
.attrs()
.map(|attr| attr.syntax().text_range())
.take(derive_attr_index as usize + 1)
.fold1(TextRange::cover),
None => None,
},
MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => {
item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
}
None => None,
},
}
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect(),
MacroCallKind::Attr { invoc_attr_index, .. } => ast::Item::cast(node.clone())?
.attrs()
.nth(invoc_attr_index as usize)
.map(|attr| attr.syntax().clone())
.into_iter()
.collect(),
};
Some(censor)
})()
.unwrap_or_default()
}

fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
Expand Down
68 changes: 41 additions & 27 deletions crates/mbe/src/syntax_bridge.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

use std::iter;

use parser::{ParseError, TreeSink};
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make::tokens::doc_comment},
tokenize, AstToken, Parse, SmolStr, SyntaxKind,
tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
T,
};
use tt::buffer::{Cursor, TokenBuffer};

Expand All @@ -19,14 +18,14 @@ use crate::{
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
syntax_node_to_token_tree_censored(node, None)
syntax_node_to_token_tree_censored(node, &Default::default())
}

/// Convert the syntax node to a `TokenTree` (what macro will consume)
/// with the censored range excluded.
pub fn syntax_node_to_token_tree_censored(
node: &SyntaxNode,
censor: Option<TextRange>,
censor: &FxHashSet<SyntaxNode>,
) -> (tt::Subtree, TokenMap) {
let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset, censor);
Expand Down Expand Up @@ -424,8 +423,6 @@ impl<'a> SrcToken for (&'a RawToken, &'a str) {
}
}

impl RawConvertor<'_> {}

impl<'a> TokenConvertor for RawConvertor<'a> {
type Token = (&'a RawToken, &'a str);

Expand Down Expand Up @@ -455,30 +452,51 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
}
}

struct Convertor {
struct Convertor<'c> {
id_alloc: TokenIdAlloc,
current: Option<SyntaxToken>,
censor: Option<TextRange>,
preorder: PreorderWithTokens,
censor: &'c FxHashSet<SyntaxNode>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
}

impl Convertor {
fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
let first = node.first_token();
let current = match censor {
Some(censor) => iter::successors(first, |token| token.next_token())
.find(|token| !censor.contains_range(token.text_range())),
None => first,
};
impl<'c> Convertor<'c> {
fn new(
node: &SyntaxNode,
global_offset: TextSize,
censor: &'c FxHashSet<SyntaxNode>,
) -> Convertor<'c> {
let range = node.text_range();
let mut preorder = node.preorder_with_tokens();
let first = Self::next_token(&mut preorder, censor);
Convertor {
id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
current,
range: node.text_range(),
current: first,
preorder,
range,
censor,
punct_offset: None,
}
}

/// Advance `preorder` to the next syntax token, skipping the entire
/// subtree of any node contained in `censor`.
///
/// Returns `None` once the preorder traversal is exhausted.
fn next_token(
    preorder: &mut PreorderWithTokens,
    censor: &FxHashSet<SyntaxNode>,
) -> Option<SyntaxToken> {
    while let Some(event) = preorder.next() {
        // Only `Enter` events yield a new element; leave events are ignored.
        if let WalkEvent::Enter(element) = event {
            match element {
                // Censored node: drop everything beneath it from the traversal.
                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
                // Ordinary node: descend into it on subsequent iterations.
                SyntaxElement::Node(_) => {}
                // First token reached is the result.
                SyntaxElement::Token(token) => return Some(token),
            }
        }
    }
    None
}
}

#[derive(Debug)]
Expand Down Expand Up @@ -511,7 +529,7 @@ impl SrcToken for SynToken {
}
}

impl TokenConvertor for Convertor {
impl TokenConvertor for Convertor<'_> {
type Token = SynToken;
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
convert_doc_comment(token.token())
Expand All @@ -532,11 +550,7 @@ impl TokenConvertor for Convertor {
if !&self.range.contains_range(curr.text_range()) {
return None;
}
self.current = match self.censor {
Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
.find(|token| !censor.contains_range(token.text_range())),
None => curr.next_token(),
};
self.current = Self::next_token(&mut self.preorder, self.censor);
let token = if curr.kind().is_punct() {
let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.'));
Expand Down
29 changes: 19 additions & 10 deletions crates/mbe/src/tests.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
mod expand;
mod rule;

use std::fmt::Write;
use std::{fmt::Write, iter};

use syntax::{ast, AstNode, NodeOrToken, SyntaxNode, WalkEvent};
use test_utils::assert_eq_text;
Expand Down Expand Up @@ -252,27 +252,36 @@ struct Struct {
let item = source_file.items().next().unwrap();
let attr = item.attrs().nth(1).unwrap();

let (tt, _) =
syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
let (tt, _) = syntax_node_to_token_tree_censored(
item.syntax(),
&iter::once(attr.syntax().clone()).collect(),
);
expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
.assert_eq(&tt.to_string());

let source = r##"
#[attr0]
#[derive(Derive0)]
#[attr1]
#[derive(Derive1)]
#[attr2]
#[derive(Derive2)]
#[attr3]
struct Struct {
field: ()
}
"##;
let source_file = ast::SourceFile::parse(source).ok().unwrap();
let item = source_file.items().next().unwrap();
let attr = item.attrs().nth(1).unwrap();

let (tt, _) = syntax_node_to_token_tree_censored(
item.syntax(),
Some(attr.syntax().text_range().cover_offset(0.into())),
);
expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
let derive_attr_index = 3;
let censor = item
.attrs()
.take(derive_attr_index as usize + 1)
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect();

let (tt, _) = syntax_node_to_token_tree_censored(item.syntax(), &censor);
expect_test::expect![[r##"# [attr0] # [attr1] # [attr2] # [derive (Derive2)] # [attr3] struct Struct {field : ()}"##]]
.assert_eq(&tt.to_string());
}
2 changes: 1 addition & 1 deletion crates/syntax/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
itertools = "0.10.0"
rowan = "0.13.0"
rowan = "0.14.0"
rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0"
once_cell = "1.3.1"
Expand Down
4 changes: 2 additions & 2 deletions crates/syntax/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ pub use crate::{
ptr::{AstPtr, SyntaxNodePtr},
syntax_error::SyntaxError,
syntax_node::{
SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
SyntaxTreeBuilder,
PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren,
SyntaxToken, SyntaxTreeBuilder,
},
token_text::TokenText,
};
Expand Down
1 change: 1 addition & 0 deletions crates/syntax/src/syntax_node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;

#[derive(Default)]
pub struct SyntaxTreeBuilder {
Expand Down