147 changes: 85 additions & 62 deletions crates/hir/src/semantics.rs
@@ -2,7 +2,7 @@
 
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
@@ -165,7 +166,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
+    // FIXME: Rename to descend_into_macros_single
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token)
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    // FIXME: Rename to descend_into_macros
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros(token)
     }
 
@@ -174,7 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         node: &SyntaxNode,
         offset: TextSize,
     ) -> Option<N> {
-        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
     }
 
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
Expand Down Expand Up @@ -228,7 +235,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
return Some(it);
}

self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
}

/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
/// descend it and find again
pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
&'slf self,
node: &SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = N> + 'slf {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}

pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
@@ -440,87 +457,93 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
 
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
 
-            for node in token.value.ancestors() {
-                match_ast! {
-                    match node {
-                        ast::MacroCall(macro_call) => {
-                            let tt = macro_call.token_tree()?;
-                            let l_delim = match tt.left_delimiter_token() {
-                                Some(it) => it.text_range().end(),
-                                None => tt.syntax().text_range().start()
-                            };
-                            let r_delim = match tt.right_delimiter_token() {
-                                Some(it) => it.text_range().start(),
-                                None => tt.syntax().text_range().end()
-                            };
-                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
-                                return None;
-                            }
-                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-
-                            return Some(token);
-                        },
-                        ast::Item(item) => {
-                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
-                                let file_id = call_id.as_file();
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
-                                    .entry(file_id)
-                                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                    .as_ref()?
-                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
-                                }
-
-                                return Some(token);
-                            }
-                        },
-                        _ => {}
+            let was_not_remapped = (|| {
+                for node in token.value.ancestors() {
+                    match_ast! {
+                        match node {
+                            ast::MacroCall(macro_call) => {
+                                let tt = macro_call.token_tree()?;
+                                let l_delim = match tt.left_delimiter_token() {
+                                    Some(it) => it.text_range().end(),
+                                    None => tt.syntax().text_range().start()
+                                };
+                                let r_delim = match tt.right_delimiter_token() {
+                                    Some(it) => it.text_range().start(),
+                                    None => tt.syntax().text_range().end()
+                                };
+                                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                                    return None;
+                                }
+                                let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                                let tokens = cache
+                                    .entry(file_id)
+                                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                    .as_ref()?
+                                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
+
+                                let len = queue.len();
+                                queue.extend(tokens.inspect(|token| {
+                                    if let Some(parent) = token.value.parent() {
+                                        self.cache(find_root(&parent), token.file_id);
+                                    }
+                                }));
+                                return (queue.len() != len).then(|| ());
+                            },
+                            ast::Item(item) => {
+                                if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                                    let file_id = call_id.as_file();
+                                    let tokens = cache
+                                        .entry(file_id)
+                                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                        .as_ref()?
+                                        .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+
+                                    let len = queue.len();
+                                    queue.extend(tokens.inspect(|token| {
+                                        if let Some(parent) = token.value.parent() {
+                                            self.cache(find_root(&parent), token.file_id);
+                                        }
+                                    }));
+                                    return (queue.len() != len).then(|| ());
+                                }
+                            },
+                            _ => {}
+                        }
                     }
                 }
-            }
-
-            None
-        })
-        .last()
-        .unwrap();
-        token.value
+                None
+            })().is_none();
+            if was_not_remapped {
+                res.push(token.value)
+            }
+        }
+        res
     }
 
+    // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+    // traversing the inner iterator when it finds a node.
     fn descend_node_at_offset(
         &self,
         node: &SyntaxNode,
         offset: TextSize,
-    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .map(move |token| self.descend_into_macros(token))
+            .map(|it| it.into_iter().map(move |it| self.token_ancestors_with_macros(it)))
            .flatten()
     }
 
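The core change above is that descending a token into macros now follows it into *every* expansion it maps to, via the `queue` worklist, instead of chasing a single chain with `iter::successors`, so a token that a macro duplicates yields all of its expanded occurrences. A minimal sketch of how a caller might consume the new `descend_into_macros_many`, assuming the usual `Semantics<'_, ide_db::RootDatabase>` instantiation; the helper `nodes_for_token` is hypothetical, not part of this PR:

```rust
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{SyntaxNode, SyntaxToken};

// Hypothetical helper: gather every syntax node the token corresponds to
// once all macro expansions are taken into account. With the old
// single-token API this could only ever return one node.
fn nodes_for_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Vec<SyntaxNode> {
    sema.descend_into_macros_many(token)
        .into_iter()
        // Each descended token lives in some (possibly macro-expanded) tree;
        // its parent is the smallest enclosing node of interest.
        .filter_map(|tok| tok.parent())
        .collect()
}
```

Keeping the old `descend_into_macros` as a `.pop().unwrap()` wrapper preserves the single-result behaviour for callers that have not migrated yet; the two FIXME comments track the eventual rename.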
2 changes: 1 addition & 1 deletion crates/hir_expand/src/db.rs
@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
2 changes: 1 addition & 1 deletion crates/hir_expand/src/hygiene.rs
@@ -171,7 +171,7 @@ impl HygieneInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
+        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }
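Both `db.rs` and `hygiene.rs` swap `range_by_token` for `first_range_by_token`, which implies the token map now records several text ranges per token id and the old single-range lookup is just `ranges_by_token(..).next()`. A standalone model of that API split, with simplified types rather than the verbatim `mbe::TokenMap` source:

```rust
use std::ops::Range;

#[derive(Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

// Model: one token id may now map to several ranges in the expanded text,
// e.g. when a macro pastes the same input token into its output twice.
struct TokenMap {
    entries: Vec<(TokenId, Range<u32>)>,
}

impl TokenMap {
    // New primitive: all ranges recorded for a token id.
    fn ranges_by_token(&self, id: TokenId) -> impl Iterator<Item = Range<u32>> + '_ {
        self.entries.iter().filter(move |(tid, _)| *tid == id).map(|(_, r)| r.clone())
    }

    // The old `range_by_token` behaviour, renamed so the "first match only"
    // semantics are explicit at every call site.
    fn first_range_by_token(&self, id: TokenId) -> Option<Range<u32>> {
        self.ranges_by_token(id).next()
    }
}
```

Call sites that genuinely only need one range (speculative expansion, hygiene, upward mapping) keep their behaviour under the new, more honest name.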
13 changes: 7 additions & 6 deletions crates/hir_expand/src/lib.rs
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
pub fn map_token_up(
@@ -453,7 +454,7 @@
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
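The new `map_token_down` return type, `Option<impl Iterator<Item = InFile<SyntaxToken>>>`, is what feeds the queue in `SemanticsImpl::descend_into_macros`: `None` means the token sits in no expansion at all, and either way a token is kept as a final result exactly when the step pushes nothing new onto the queue. A toy model of that worklist under those assumptions (`descend_all` and its types are illustrative, not rust-analyzer code):

```rust
// Toy worklist descent: `remap` returns Some(tokens) when a token maps into
// a macro expansion, None when it is not inside a macro call at all.
fn descend_all<T, F, I>(start: T, mut remap: F) -> Vec<T>
where
    F: FnMut(&T) -> Option<I>,
    I: IntoIterator<Item = T>,
{
    let mut queue = vec![start];
    let mut res = Vec::new();
    while let Some(tok) = queue.pop() {
        let len = queue.len();
        if let Some(mapped) = remap(&tok) {
            queue.extend(mapped);
        }
        // Mirrors `(queue.len() != len).then(|| ())` in semantics.rs: a token
        // is a final result only if the step produced no further tokens.
        if queue.len() == len {
            res.push(tok);
        }
    }
    res
}

fn main() {
    // Each "token" below 4 expands into two descendants; 4..=7 are leaves.
    let leaves = descend_all(1u32, |&n| (n < 4).then(|| vec![2 * n, 2 * n + 1]));
    assert_eq!(leaves, vec![7, 6, 5, 4]);
}
```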