25 changes: 12 additions & 13 deletions compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -5,7 +5,7 @@ use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, PResult};
 use rustc_macros::{Decodable, Encodable};
 use rustc_session::parse::ParseSess;
-use rustc_span::{Ident, Span, Symbol};
+use rustc_span::{Ident, Span, Symbol, sym};
 
 use crate::errors;
 
@@ -69,15 +69,15 @@ impl MetaVarExpr {
         }
 
         let mut iter = args.iter();
-        let rslt = match ident.as_str() {
-            "concat" => parse_concat(&mut iter, psess, outer_span, ident.span)?,
-            "count" => parse_count(&mut iter, psess, ident.span)?,
-            "ignore" => {
+        let rslt = match ident.name {
+            sym::concat => parse_concat(&mut iter, psess, outer_span, ident.span)?,
+            sym::count => parse_count(&mut iter, psess, ident.span)?,
+            sym::ignore => {
                 eat_dollar(&mut iter, psess, ident.span)?;
                 MetaVarExpr::Ignore(parse_ident(&mut iter, psess, ident.span)?)
             }
-            "index" => MetaVarExpr::Index(parse_depth(&mut iter, psess, ident.span)?),
-            "len" => MetaVarExpr::Len(parse_depth(&mut iter, psess, ident.span)?),
+            sym::index => MetaVarExpr::Index(parse_depth(&mut iter, psess, ident.span)?),
+            sym::len => MetaVarExpr::Len(parse_depth(&mut iter, psess, ident.span)?),
             _ => {
                 let err = errors::MveUnrecognizedExpr {
                     span: ident.span,
@@ -119,14 +119,13 @@ fn check_trailing_tokens<'psess>(
     }
 
     // `None` for max indicates the arg count must be exact, `Some` indicates a range is accepted.
-    let (min_or_exact_args, max_args) = match ident.as_str() {
-        "concat" => panic!("concat takes unlimited tokens but didn't eat them all"),
-        "ignore" => (1, None),
+    let (min_or_exact_args, max_args) = match ident.name {
+        sym::concat => panic!("concat takes unlimited tokens but didn't eat them all"),
+        sym::ignore => (1, None),
         // 1 or 2 args
-        "count" => (1, Some(2)),
+        sym::count => (1, Some(2)),
         // 0 or 1 arg
-        "index" => (0, Some(1)),
-        "len" => (0, Some(1)),
+        sym::index | sym::len => (0, Some(1)),
         other => unreachable!("unknown MVEs should be rejected earlier (got `{other}`)"),
     };
 
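For context on the file above: these arms parse the `${...}` metavariable expressions (`concat`, `count`, `ignore`, `index`, `len`); the change only swaps string comparison via `ident.as_str()` for matching `ident.name` against pre-interned `sym::` symbols, so behaviour is unchanged. Below is a minimal, illustrative sketch of the surface syntax these arms accept; the macro name and values are made up, it assumes a recent nightly with the feature gate shown, and `concat` is omitted since it sits behind the separate `macro_metavar_expr_concat` gate.

```rust
// Illustrative only, not part of this PR: the `${...}` expressions that
// `MetaVarExpr::parse` recognizes, under `#![feature(macro_metavar_expr)]`.
#![feature(macro_metavar_expr)]

macro_rules! count_and_index {
    ($($x:expr),*) => {{
        // `${count($x)}` expands to the number of `$x` repetitions.
        let total = ${count($x)};
        // `${ignore($x)}` ties the repetition to `$x` without emitting its tokens,
        // and `${index()}` expands to the current repetition index.
        let indices = [$( ${ignore($x)} ${index()} ),*];
        (total, indices)
    }};
}

fn main() {
    let (total, indices) = count_and_index!(10, 20, 30);
    assert_eq!(total, 3);
    assert_eq!(indices, [0, 1, 2]);
}
```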
45 changes: 27 additions & 18 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -375,6 +375,19 @@ fn transcribe_metavar<'tx>(
         return Ok(());
     };
 
+    let MatchedSingle(pnr) = cur_matched else {
+        // We were unable to descend far enough. This is an error.
+        return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
+    };
+
+    transcribe_pnr(tscx, sp, pnr)
+}
+
+fn transcribe_pnr<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    mut sp: Span,
+    pnr: &ParseNtResult,
+) -> PResult<'tx, ()> {
     // We wrap the tokens in invisible delimiters, unless they are already wrapped
     // in invisible delimiters with the same `MetaVarKind`. Because some proc
     // macros can't handle multiple layers of invisible delimiters of the same
@@ -404,33 +417,33 @@
         )
     };
 
-    let tt = match cur_matched {
-        MatchedSingle(ParseNtResult::Tt(tt)) => {
+    let tt = match pnr {
+        ParseNtResult::Tt(tt) => {
             // `tt`s are emitted into the output stream directly as "raw tokens",
             // without wrapping them into groups. Other variables are emitted into
             // the output stream as groups with `Delimiter::Invisible` to maintain
             // parsing priorities.
             maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
         }
-        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
+        ParseNtResult::Ident(ident, is_raw) => {
             tscx.marker.mark_span(&mut sp);
             with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
             let kind = token::NtIdent(*ident, *is_raw);
             TokenTree::token_alone(kind, sp)
         }
-        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
+        ParseNtResult::Lifetime(ident, is_raw) => {
             tscx.marker.mark_span(&mut sp);
             with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
             let kind = token::NtLifetime(*ident, *is_raw);
             TokenTree::token_alone(kind, sp)
         }
-        MatchedSingle(ParseNtResult::Item(item)) => {
+        ParseNtResult::Item(item) => {
             mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
         }
-        MatchedSingle(ParseNtResult::Block(block)) => {
+        ParseNtResult::Block(block) => {
             mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
         }
-        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+        ParseNtResult::Stmt(stmt) => {
            let stream = if let StmtKind::Empty = stmt.kind {
                 // FIXME: Properly collect tokens for empty statements.
                 TokenStream::token_alone(token::Semi, stmt.span)
@@ ... @@
             };
             mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
         }
-        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
+        ParseNtResult::Pat(pat, pat_kind) => {
             mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
         }
-        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
+        ParseNtResult::Expr(expr, kind) => {
             let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
                 ExprKind::Lit(_) => (true, true),
                 ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
Expand All @@ -460,31 +473,27 @@ fn transcribe_metavar<'tx>(
TokenStream::from_ast(expr),
)
}
MatchedSingle(ParseNtResult::Literal(lit)) => {
ParseNtResult::Literal(lit) => {
mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
}
MatchedSingle(ParseNtResult::Ty(ty)) => {
ParseNtResult::Ty(ty) => {
let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
}
MatchedSingle(ParseNtResult::Meta(attr_item)) => {
ParseNtResult::Meta(attr_item) => {
let has_meta_form = attr_item.meta_kind().is_some();
mk_delimited(
attr_item.span(),
MetaVarKind::Meta { has_meta_form },
TokenStream::from_ast(attr_item),
)
}
MatchedSingle(ParseNtResult::Path(path)) => {
ParseNtResult::Path(path) => {
mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
}
MatchedSingle(ParseNtResult::Vis(vis)) => {
ParseNtResult::Vis(vis) => {
mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
}
MatchedSeq(..) => {
// We were unable to descend far enough. This is an error.
return Err(dcx.create_err(MacroVarStillRepeating { span: sp, ident }));
}
};

tscx.result.push(tt);
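The net effect of the hunks above: `transcribe_metavar` now rejects the still-repeating (`MatchedSeq`) case up front with a `let ... else`, and the per-kind handling moves into the new `transcribe_pnr`, which is why every arm drops its `MatchedSingle(...)` wrapper. Below is a standalone sketch of that refactoring pattern, using hypothetical stand-in types rather than the real `NamedMatch`/`ParseNtResult`.

```rust
// Hypothetical stand-ins for rustc's NamedMatch / ParseNtResult; illustrative only.
#![allow(dead_code)]

enum ParseResult {
    Ident(String),
    Literal(String),
}

enum NamedMatch {
    MatchedSingle(ParseResult),
    MatchedSeq(Vec<NamedMatch>),
}

// Wrapper: handle the error case once, then delegate to the helper.
fn transcribe(cur_matched: &NamedMatch) -> Result<String, String> {
    // A sequence at this depth means the variable is still repeating: an error.
    let NamedMatch::MatchedSingle(pnr) = cur_matched else {
        return Err("variable still repeating at this depth".to_string());
    };
    transcribe_single(pnr)
}

// Helper: with the sequence case gone, the match covers only payload variants.
fn transcribe_single(pnr: &ParseResult) -> Result<String, String> {
    Ok(match pnr {
        ParseResult::Ident(name) => format!("ident `{name}`"),
        ParseResult::Literal(lit) => format!("literal `{lit}`"),
    })
}

fn main() {
    let single = NamedMatch::MatchedSingle(ParseResult::Ident("x".to_string()));
    let seq = NamedMatch::MatchedSeq(vec![]);
    assert_eq!(transcribe(&single).unwrap(), "ident `x`");
    assert!(transcribe(&seq).is_err());
}
```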