Properly handle attributes on statements
We now collect tokens for the underlying node wrapped by `StmtKind`
instead of storing tokens directly in `Stmt`.
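
As a rough standalone model of that layout (simplified stand-in types, not the real AST definitions): the statement itself carries no token field, each token-bearing node wrapped by its kind does, and `Stmt` only delegates.

```rust
#![allow(dead_code)]

// Simplified stand-ins for the real AST types; only the token-ownership
// shape is modeled here.
#[derive(Debug)]
struct Tokens(Vec<String>);

#[derive(Debug)]
struct Local { tokens: Option<Tokens> }

#[derive(Debug)]
struct Item { tokens: Option<Tokens> }

#[derive(Debug)]
enum StmtKind {
    Local(Local),
    Item(Item),
    Empty,
}

// Note: no `tokens` field on the statement itself.
#[derive(Debug)]
struct Stmt { kind: StmtKind }

impl Stmt {
    // The statement forwards to whatever node its kind wraps.
    fn tokens(&self) -> Option<&Tokens> {
        match &self.kind {
            StmtKind::Local(local) => local.tokens.as_ref(),
            StmtKind::Item(item) => item.tokens.as_ref(),
            StmtKind::Empty => None,
        }
    }

    fn set_tokens(&mut self, tokens: Option<Tokens>) {
        match &mut self.kind {
            StmtKind::Local(local) => local.tokens = tokens,
            StmtKind::Item(item) => item.tokens = tokens,
            StmtKind::Empty => {}
        }
    }
}

fn main() {
    let mut stmt = Stmt { kind: StmtKind::Local(Local { tokens: None }) };
    stmt.set_tokens(Some(Tokens(vec![
        "let".into(), "x".into(), "=".into(), "1".into(), ";".into(),
    ])));
    println!("{:?}", stmt.tokens());
}
```

This is why the diff below removes `tokens` from `Stmt` and instead adds it to `Local` and `MacCallStmt`.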

`LazyTokenStream` now supports capturing a trailing semicolon after it
is initially constructed. This allows us to avoid refactoring statement
parsing to wrap the parsing of the semicolon in `parse_tokens`.
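
A minimal standalone sketch of that capability, using simplified stand-ins for `CreateTokenStream` and `LazyTokenStream` rather than the compiler's own types: the token stream is built on demand, and a trailing `;` can be recorded after the creator has been constructed.

```rust
#![allow(dead_code)]

use std::rc::Rc;

// Stand-in for `CreateTokenStream`.
trait MakeTokens {
    fn make_tokens(&self) -> Vec<String>;
    fn add_trailing_semi(&self) -> Box<dyn MakeTokens>;
}

// Creator backed by the tokens captured while parsing one statement.
#[derive(Clone)]
struct CapturedTokens {
    tokens: Vec<String>,
    trailing_semi: bool,
}

impl MakeTokens for CapturedTokens {
    fn make_tokens(&self) -> Vec<String> {
        let mut out = self.tokens.clone();
        if self.trailing_semi {
            out.push(";".to_string());
        }
        out
    }

    fn add_trailing_semi(&self) -> Box<dyn MakeTokens> {
        Box::new(CapturedTokens { trailing_semi: true, ..self.clone() })
    }
}

// Cheap-to-clone handle over the boxed creator, stand-in for `LazyTokenStream`.
struct LazyTokens(Rc<Box<dyn MakeTokens>>);

impl LazyTokens {
    fn new(inner: impl MakeTokens + 'static) -> Self {
        let boxed: Box<dyn MakeTokens> = Box::new(inner);
        LazyTokens(Rc::new(boxed))
    }

    // Extends the captured stream by one trailing `;` without re-parsing.
    fn add_trailing_semi(&self) -> LazyTokens {
        LazyTokens(Rc::new(self.0.add_trailing_semi()))
    }

    fn make_tokens(&self) -> Vec<String> {
        self.0.make_tokens()
    }
}

fn main() {
    let stmt_tokens = LazyTokens::new(CapturedTokens {
        tokens: vec!["x".into(), "+".into(), "1".into()],
        trailing_semi: false,
    });
    // The parser later eats a `;` and records it on the existing stream.
    let with_semi = stmt_tokens.add_trailing_semi();
    assert_eq!(with_semi.make_tokens(), ["x", "+", "1", ";"]);
}
```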

Attributes on item statements
(e.g. `fn foo() { #[bar] struct MyStruct; }`) are now treated as
item attributes, not statement attributes, which is consistent with how
we handle attributes on other kinds of statements. The feature-gating
code is adjusted so that proc-macro attributes are still allowed on item
statements on stable.

Two built-in macros (`#[global_allocator]` and `#[test]`) needed to be
adjusted to support being passed `Annotatable::Stmt`.
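
Schematically, such an expander now peels the statement wrapper off, expands the inner item as before, and wraps the result back up. The sketch below uses mock types, not the real `Annotatable` or the actual expander signatures:

```rust
#![allow(dead_code)]

// Mock stand-ins for the real expansion types.
#[derive(Debug, Clone)]
struct Item(String);

#[derive(Debug, Clone)]
enum StmtKind {
    Item(Item),
}

#[derive(Debug, Clone)]
struct Stmt {
    kind: StmtKind,
}

#[derive(Debug, Clone)]
enum Annotatable {
    Item(Item),
    Stmt(Box<Stmt>),
}

// A built-in macro that rewrites the annotated item: it remembers whether
// the item arrived wrapped in a statement and restores that wrapper on the
// result.
fn expand(input: Annotatable) -> Annotatable {
    let (item, was_stmt) = match input {
        Annotatable::Item(item) => (item, false),
        Annotatable::Stmt(stmt) => match stmt.kind {
            StmtKind::Item(item) => (item, true),
        },
    };

    // The actual expansion logic only ever deals with the item.
    let expanded = Item(format!("expanded({})", item.0));

    if was_stmt {
        Annotatable::Stmt(Box::new(Stmt { kind: StmtKind::Item(expanded) }))
    } else {
        Annotatable::Item(expanded)
    }
}

fn main() {
    let as_stmt = Annotatable::Stmt(Box::new(Stmt {
        kind: StmtKind::Item(Item("struct MyStruct;".into())),
    }));
    println!("{:?}", expand(as_stmt));
}
```
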
Aaron1011 committed Nov 26, 2020
1 parent 72da5a9 commit de88bf1
Showing 20 changed files with 484 additions and 186 deletions.
48 changes: 43 additions & 5 deletions compiler/rustc_ast/src/ast.rs
@@ -901,29 +901,65 @@ pub struct Stmt {
pub id: NodeId,
pub kind: StmtKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
}

impl Stmt {
pub fn tokens(&self) -> Option<&LazyTokenStream> {
match self.kind {
StmtKind::Local(ref local) => local.tokens.as_ref(),
StmtKind::Item(ref item) => item.tokens.as_ref(),
StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(),
StmtKind::Empty => None,
StmtKind::MacCall(ref mac) => mac.tokens.as_ref(),
}
}

pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens.as_mut(),
StmtKind::Item(ref mut item) => item.tokens.as_mut(),
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(),
StmtKind::Empty => None,
StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(),
}
}

pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) {
match self.kind {
StmtKind::Local(ref mut local) => local.tokens = tokens,
StmtKind::Item(ref mut item) => item.tokens = tokens,
StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens,
StmtKind::Empty => {}
StmtKind::MacCall(ref mut mac) => mac.tokens = tokens,
}
}

pub fn has_trailing_semicolon(&self) -> bool {
match &self.kind {
StmtKind::Semi(_) => true,
StmtKind::MacCall(mac) => matches!(mac.style, MacStmtStyle::Semicolon),
_ => false,
}
}

/// Converts a parsed `Stmt` to a `Stmt` with
/// a trailing semicolon.
///
/// This only modifies the parsed AST struct, not the attached
/// `LazyTokenStream`. The parser is responsible for calling
/// `CreateTokenStream::add_trailing_semi` when there is actually
/// a semicolon in the tokenstream.
pub fn add_trailing_semicolon(mut self) -> Self {
self.kind = match self.kind {
StmtKind::Expr(expr) => StmtKind::Semi(expr),
StmtKind::MacCall(mac) => {
StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs }| MacCallStmt {
mac,
style: MacStmtStyle::Semicolon,
attrs,
StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs, tokens }| {
MacCallStmt { mac, style: MacStmtStyle::Semicolon, attrs, tokens }
}))
}
kind => kind,
};

self
}

@@ -963,6 +999,7 @@ pub struct MacCallStmt {
pub mac: MacCall,
pub style: MacStmtStyle,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
}

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
@@ -988,6 +1025,7 @@ pub struct Local {
pub init: Option<P<Expr>>,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
}

/// An arm of a 'match'.
14 changes: 6 additions & 8 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -576,13 +576,14 @@ pub fn noop_visit_parenthesized_parameter_data<T: MutVisitor>(
}

pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
let Local { id, pat, ty, init, span, attrs } = local.deref_mut();
let Local { id, pat, ty, init, span, attrs, tokens } = local.deref_mut();
vis.visit_id(id);
vis.visit_pat(pat);
visit_opt(ty, |ty| vis.visit_ty(ty));
visit_opt(init, |init| vis.visit_expr(init));
vis.visit_span(span);
visit_thin_attrs(attrs, vis);
visit_lazy_tts(tokens, vis);
}

pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
@@ -1325,16 +1326,12 @@ pub fn noop_filter_map_expr<T: MutVisitor>(mut e: P<Expr>, vis: &mut T) -> Optio
}

pub fn noop_flat_map_stmt<T: MutVisitor>(
Stmt { kind, mut span, mut id, mut tokens }: Stmt,
Stmt { kind, mut span, mut id }: Stmt,
vis: &mut T,
) -> SmallVec<[Stmt; 1]> {
vis.visit_id(&mut id);
vis.visit_span(&mut span);
visit_lazy_tts(&mut tokens, vis);
noop_flat_map_stmt_kind(kind, vis)
.into_iter()
.map(|kind| Stmt { id, kind, span, tokens: tokens.clone() })
.collect()
noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| Stmt { id, kind, span }).collect()
}

pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
@@ -1351,9 +1348,10 @@ pub fn noop_flat_map_stmt_kind<T: MutVisitor>(
StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(),
StmtKind::Empty => smallvec![StmtKind::Empty],
StmtKind::MacCall(mut mac) => {
let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut();
let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut();
vis.visit_mac_call(mac_);
visit_thin_attrs(attrs, vis);
visit_lazy_tts(tokens, vis);
smallvec![StmtKind::MacCall(mac)]
}
}
11 changes: 11 additions & 0 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -121,10 +121,14 @@ where
}

pub trait CreateTokenStream: sync::Send + sync::Sync {
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>;
fn create_token_stream(&self) -> TokenStream;
}

impl CreateTokenStream for TokenStream {
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
panic!("Cannot call `add_trailing_semi` on a `TokenStream`!");
}
fn create_token_stream(&self) -> TokenStream {
self.clone()
}
@@ -141,6 +145,13 @@ impl LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner)))
}

/// Extends the captured stream by one token,
/// which must be a trailing semicolon. This
/// affects the `TokenStream` created by `make_tokenstream`.
pub fn add_trailing_semi(&self) -> LazyTokenStream {
LazyTokenStream(Lrc::new(self.0.add_trailing_semi()))
}

pub fn create_token_stream(&self) -> TokenStream {
self.0.create_token_stream()
}
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/visit.rs
@@ -686,7 +686,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) {
StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr),
StmtKind::Empty => {}
StmtKind::MacCall(ref mac) => {
let MacCallStmt { ref mac, style: _, ref attrs } = **mac;
let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac;
visitor.visit_mac_call(mac);
for attr in attrs.iter() {
visitor.visit_attribute(attr);
3 changes: 2 additions & 1 deletion compiler/rustc_builtin_macros/src/deriving/debug.rs
@@ -132,6 +132,7 @@ fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> as
id: ast::DUMMY_NODE_ID,
span: sp,
attrs: ast::AttrVec::new(),
tokens: None,
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp, tokens: None }
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
}
1 change: 0 additions & 1 deletion compiler/rustc_builtin_macros/src/deriving/mod.rs
@@ -64,7 +64,6 @@ impl MultiItemModifier for BuiltinDerive {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Item(a.expect_item()),
span,
tokens: None,
})));
});
} else {
2 changes: 0 additions & 2 deletions compiler/rustc_expand/src/base.rs
@@ -374,7 +374,6 @@ macro_rules! make_stmts_default {
id: ast::DUMMY_NODE_ID,
span: e.span,
kind: ast::StmtKind::Expr(e),
tokens: None
}]
})
};
@@ -617,7 +616,6 @@ impl MacResult for DummyResult {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)),
span: self.span,
tokens: None
}])
}

26 changes: 6 additions & 20 deletions compiler/rustc_expand/src/build.rs
@@ -140,12 +140,7 @@ impl<'a> ExtCtxt<'a> {
}

pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
ast::Stmt {
id: ast::DUMMY_NODE_ID,
span: expr.span,
kind: ast::StmtKind::Expr(expr),
tokens: None,
}
ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
}

pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
@@ -162,13 +157,9 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span: sp,
attrs: AttrVec::new(),
});
ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Local(local),
span: sp,
tokens: None,
}
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
}

// Generates `let _: Type;`, which is usually used for type assertions.
@@ -180,17 +171,13 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span,
attrs: AttrVec::new(),
tokens: None,
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span, tokens: None }
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span }
}

pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Item(item),
span: sp,
tokens: None,
}
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp }
}

pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
@@ -200,7 +187,6 @@ impl<'a> ExtCtxt<'a> {
id: ast::DUMMY_NODE_ID,
span: expr.span,
kind: ast::StmtKind::Expr(expr),
tokens: None,
}],
)
}
12 changes: 3 additions & 9 deletions compiler/rustc_expand/src/expand.rs
@@ -1274,12 +1274,6 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
// we'll expand attributes on expressions separately
if !stmt.is_expr() {
let attr = if stmt.is_item() {
// FIXME: Implement proper token collection for statements
if let StmtKind::Item(item) = &mut stmt.kind {
stmt.tokens = item.tokens.take()
} else {
unreachable!()
};
self.take_first_attr(&mut stmt)
} else {
// Ignore derives on non-item statements for backwards compatibility.
@@ -1295,7 +1289,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
}

if let StmtKind::MacCall(mac) = stmt.kind {
let MacCallStmt { mac, style, attrs } = mac.into_inner();
let MacCallStmt { mac, style, attrs, tokens: _ } = mac.into_inner();
self.check_attributes(&attrs);
let mut placeholder =
self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts).make_stmts();
@@ -1312,10 +1306,10 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
}

// The placeholder expander gives ids to statements, so we avoid folding the id here.
let ast::Stmt { id, kind, span, tokens } = stmt;
let ast::Stmt { id, kind, span } = stmt;
noop_flat_map_stmt_kind(kind, self)
.into_iter()
.map(|kind| ast::Stmt { id, kind, span, tokens: tokens.clone() })
.map(|kind| ast::Stmt { id, kind, span })
.collect()
}

11 changes: 4 additions & 7 deletions compiler/rustc_expand/src/placeholders.rs
@@ -104,8 +104,9 @@ pub fn placeholder(
mac: mac_placeholder(),
style: ast::MacStmtStyle::Braces,
attrs: ast::AttrVec::new(),
tokens: None,
});
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac), tokens: None }
ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) }
}]),
AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm {
attrs: Default::default(),
@@ -331,12 +332,8 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {

// FIXME: We will need to preserve the original semicolon token and
// span as part of #15701
let empty_stmt = ast::Stmt {
id: ast::DUMMY_NODE_ID,
kind: ast::StmtKind::Empty,
span: DUMMY_SP,
tokens: None,
};
let empty_stmt =
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Empty, span: DUMMY_SP };

if let Some(stmt) = stmts.pop() {
if stmt.has_trailing_semicolon() {
2 changes: 0 additions & 2 deletions compiler/rustc_interface/src/util.rs
@@ -810,7 +810,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> {
id: resolver.next_node_id(),
kind: ast::StmtKind::Expr(expr),
span: rustc_span::DUMMY_SP,
tokens: None,
}
}

@@ -827,7 +826,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> {
id: self.resolver.next_node_id(),
span: rustc_span::DUMMY_SP,
kind: ast::StmtKind::Expr(loop_expr),
tokens: None,
};

if self.within_static_or_const {
23 changes: 9 additions & 14 deletions compiler/rustc_parse/src/lib.rs
@@ -6,6 +6,7 @@
#![feature(or_patterns)]

use rustc_ast as ast;
use rustc_ast::attr::HasAttrs;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
@@ -249,29 +250,23 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// before we fall back to the stringification.

let convert_tokens =
|tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
|tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());

let tokens = match *nt {
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
Nonterminal::NtStmt(ref stmt) => {
// FIXME: We currently only collect tokens for `:stmt`
// matchers in `macro_rules!` macros. When we start collecting
// tokens for attributes on statements, we will need to prepend
// attributes here
convert_tokens(&stmt.tokens)
}
Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
Nonterminal::NtIdent(ident, is_raw) => {
Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
}
Nonterminal::NtLifetime(ident) => {
Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
}
Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
if expr.tokens.is_none() {
