use TokenStream rather than &[TokenTree] for built-in macros
That way, we don't lose the jointness info
matklad committed Sep 3, 2019
1 parent b314654 commit fa893a3
Showing 20 changed files with 109 additions and 113 deletions.
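
The "jointness" the commit message refers to is the spacing flag a TokenStream keeps for each token, recording whether it was written immediately adjacent to the next one: `>>` is two joint `>` tokens, while `> >` is two separate ones. Flattening a stream into a plain `&[TokenTree]` drops that flag. The sketch below is not part of the commit; it uses the external proc-macro2 crate as a stand-in for the compiler-internal `syntax::tokenstream` types, purely to show the information that would otherwise be lost.

// Illustration only (assumption: proc-macro2 stands in for the internal
// `syntax::tokenstream` API touched by this commit). A TokenStream can tell
// `>>` (joint) apart from `> >` (alone); a bare list of token kinds cannot.
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn punct_spacings(src: &str) -> Vec<Spacing> {
    let stream: TokenStream = src.parse().expect("source should tokenize");
    stream
        .into_iter()
        .filter_map(|tt| match tt {
            TokenTree::Punct(p) => Some(p.spacing()),
            _ => None,
        })
        .collect()
}

fn main() {
    // `>>` lexes as two `>` puncts, the first marked Joint.
    assert_eq!(punct_spacings(">>"), [Spacing::Joint, Spacing::Alone]);
    // `> >` lexes as two `>` puncts, both Alone.
    assert_eq!(punct_spacings("> >"), [Spacing::Alone, Spacing::Alone]);
}
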
6 changes: 3 additions & 3 deletions src/doc/unstable-book/src/language-features/plugin.md
@@ -57,12 +57,12 @@ extern crate rustc;
extern crate rustc_driver;
use syntax::parse::token::{self, Token};
use syntax::tokenstream::TokenTree;
use syntax::tokenstream::{TokenTree, TokenStream};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax_pos::Span;
use rustc_driver::plugin::Registry;
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: TokenStream)
-> Box<dyn MacResult + 'static> {
static NUMERALS: &'static [(&'static str, usize)] = &[
@@ -78,7 +78,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
return DummyResult::any(sp);
}
let text = match args[0] {
let text = match args.into_trees().next().unwrap() {
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
2 changes: 1 addition & 1 deletion src/librustc_metadata/encoder.rs
@@ -1354,7 +1354,7 @@ impl EncodeContext<'tcx> {
let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
Entry {
kind: EntryKind::MacroDef(self.lazy(MacroDef {
body: pprust::tokens_to_string(macro_def.body.clone()),
body: pprust::tts_to_string(macro_def.body.clone()),
legacy: macro_def.legacy,
})),
visibility: self.lazy(ty::Visibility::Public),
46 changes: 23 additions & 23 deletions src/libsyntax/diagnostics/plugin.rs
@@ -6,7 +6,7 @@ use crate::ext::base::{ExtCtxt, MacEager, MacResult};
use crate::parse::token::{self, Token};
use crate::ptr::P;
use crate::symbol::kw;
use crate::tokenstream::{TokenTree};
use crate::tokenstream::{TokenTree, TokenStream};

use smallvec::smallvec;
use syntax_pos::Span;
@@ -27,12 +27,11 @@ pub type ErrorMap = BTreeMap<Name, ErrorInfo>;

pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
token_tree: &[TokenTree])
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
let code = match token_tree {
[
TokenTree::Token(Token { kind: token::Ident(code, _), .. })
] => code,
assert_eq!(tts.len(), 1);
let code = match tts.into_trees().next() {
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
_ => unreachable!()
};

@@ -62,20 +61,21 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,

pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
token_tree: &[TokenTree])
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
let (code, description) = match token_tree {
[
TokenTree::Token(Token { kind: token::Ident(code, _), .. })
] => {
(*code, None)
},
[
TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
TokenTree::Token(Token { kind: token::Comma, .. }),
TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})
] => {
(*code, Some(*symbol))
assert!(tts.len() == 1 || tts.len() == 3);
let mut cursor = tts.into_trees();
let code = match cursor.next() {
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
_ => unreachable!()
};
let description = match (cursor.next(), cursor.next()) {
(None, None) => None,
(
Some(TokenTree::Token(Token { kind: token::Comma, .. })),
Some(TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..}))
) => {
Some(symbol)
},
_ => unreachable!()
};
@@ -121,12 +121,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,

pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span,
token_tree: &[TokenTree])
tts: TokenStream)
-> Box<dyn MacResult+'cx> {
assert_eq!(token_tree.len(), 3);
let ident = match &token_tree[2] {
assert_eq!(tts.len(), 3);
let ident = match tts.into_trees().nth(2) {
// DIAGNOSTICS ident.
&TokenTree::Token(Token { kind: token::Ident(name, _), span })
Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
=> Ident::new(name, span),
_ => unreachable!()
};
27 changes: 12 additions & 15 deletions src/libsyntax/ext/base.rs
@@ -10,7 +10,7 @@ use crate::parse::token;
use crate::ptr::P;
use crate::symbol::{kw, sym, Ident, Symbol};
use crate::{ThinVec, MACRO_ARGUMENTS};
use crate::tokenstream::{self, TokenStream, TokenTree};
use crate::tokenstream::{self, TokenStream};
use crate::visit::Visitor;

use errors::{DiagnosticBuilder, DiagnosticId};
@@ -235,18 +235,18 @@ pub trait TTMacroExpander {
}

pub type MacroExpanderFn =
for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
-> Box<dyn MacResult+'cx>;

impl<F> TTMacroExpander for F
where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
-> Box<dyn MacResult+'cx>
{
fn expand<'cx>(
&self,
ecx: &'cx mut ExtCtxt<'_>,
span: Span,
input: TokenStream,
mut input: TokenStream,
) -> Box<dyn MacResult+'cx> {
struct AvoidInterpolatedIdents;

@@ -268,10 +268,8 @@ impl<F> TTMacroExpander for F
mut_visit::noop_visit_mac(mac, self)
}
}

let input: Vec<_> =
input.trees().map(|mut tt| { AvoidInterpolatedIdents.visit_tt(&mut tt); tt }).collect();
(*self)(ecx, span, &input)
AvoidInterpolatedIdents.visit_tts(&mut input);
(*self)(ecx, span, input)
}
}

@@ -677,7 +675,7 @@ impl SyntaxExtension {
}

pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: &[TokenTree])
fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: TokenStream)
-> Box<dyn MacResult + 'cx> {
DummyResult::any(span)
}
@@ -811,9 +809,8 @@ impl<'a> ExtCtxt<'a> {
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
expand::MacroExpander::new(self, true)
}

pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
}
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
@@ -1019,7 +1016,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
/// done as rarely as possible).
pub fn check_zero_tts(cx: &ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
name: &str) {
if !tts.is_empty() {
cx.span_err(sp, &format!("{} takes no arguments", name));
@@ -1030,7 +1027,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>,
/// expect exactly one string literal, or emit an error and return `None`.
pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
name: &str)
-> Option<String> {
let mut p = cx.new_parser_from_tts(tts);
@@ -1053,7 +1050,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
/// parsing error, emit a non-fatal error and return `None`.
pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
let mut p = cx.new_parser_from_tts(tts);
let mut es = Vec::new();
while p.token != token::Eof {
2 changes: 1 addition & 1 deletion src/libsyntax/ext/expand.rs
@@ -701,7 +701,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
path: &Path,
span: Span,
) -> AstFragment {
let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
let mut parser = self.cx.new_parser_from_tts(toks);
match parser.parse_ast_fragment(kind, false) {
Ok(fragment) => {
parser.ensure_complete_parse(path, kind.name(), span);
6 changes: 1 addition & 5 deletions src/libsyntax/print/pprust.rs
@@ -356,11 +356,7 @@ pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
to_string(|s| s.print_tt(tt, false))
}

pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
tokens_to_string(tts.iter().cloned().collect())
}

pub fn tokens_to_string(tokens: TokenStream) -> String {
pub fn tts_to_string(tokens: TokenStream) -> String {
to_string(|s| s.print_tts(tokens, false))
}

2 changes: 1 addition & 1 deletion src/libsyntax/tokenstream.rs
@@ -506,7 +506,7 @@ impl Cursor {

impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&pprust::tokens_to_string(self.clone()))
f.write_str(&pprust::tts_to_string(self.clone()))
}
}

24 changes: 10 additions & 14 deletions src/libsyntax_ext/asm.rs
@@ -8,13 +8,12 @@ use errors::DiagnosticBuilder;

use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::parse;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{kw, sym, Symbol};
use syntax::ast::AsmDialect;
use syntax_pos::Span;
use syntax::tokenstream;
use syntax::tokenstream::{self, TokenStream};
use syntax::{span_err, struct_span_err};

enum State {
@@ -43,7 +42,7 @@ const OPTIONS: &[Symbol] = &[sym::volatile, sym::alignstack, sym::intel];

pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree])
tts: TokenStream)
-> Box<dyn base::MacResult + 'cx> {
let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
Ok(Some(inline_asm)) => inline_asm,
@@ -71,20 +70,20 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
fn parse_inline_asm<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
) -> Result<Option<ast::InlineAsm>, DiagnosticBuilder<'a>> {
// Split the tts before the first colon, to avoid `asm!("x": y)` being
// parsed as `asm!(z)` with `z = "x": y` which is type ascription.
let first_colon = tts.iter()
let first_colon = tts.trees()
.position(|tt| {
match *tt {
match tt {
tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
_ => false,
}
})
.unwrap_or(tts.len());
let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
let mut asm = kw::Invalid;
let mut asm_str_style = None;
let mut outputs = Vec::new();
@@ -110,7 +109,8 @@ fn parse_inline_asm<'a>(
));
}
// Nested parser, stop before the first colon (see above).
let mut p2 = cx.new_parser_from_tts(&tts[..first_colon]);
let mut p2 =
cx.new_parser_from_tts(tts.trees().take(first_colon).collect());

if p2.token == token::Eof {
let mut err =
@@ -129,12 +129,8 @@
// This is most likely malformed.
if p2.token != token::Eof {
let mut extra_tts = p2.parse_all_token_trees()?;
extra_tts.extend(tts[first_colon..].iter().cloned());
p = parse::stream_to_parser(
cx.parse_sess,
extra_tts.into_iter().collect(),
Some("inline assembly"),
);
extra_tts.extend(tts.trees().skip(first_colon));
p = cx.new_parser_from_tts(extra_tts.into_iter().collect());
}

asm = s;
6 changes: 3 additions & 3 deletions src/libsyntax_ext/assert.rs
@@ -13,7 +13,7 @@ use syntax_pos::{Span, DUMMY_SP};
pub fn expand_assert<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: &[TokenTree],
tts: TokenStream,
) -> Box<dyn MacResult + 'cx> {
let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) {
Ok(assert) => assert,
@@ -59,9 +59,9 @@ struct Assert {
fn parse_assert<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: &[TokenTree]
stream: TokenStream
) -> Result<Assert, DiagnosticBuilder<'a>> {
let mut parser = cx.new_parser_from_tts(tts);
let mut parser = cx.new_parser_from_tts(stream);

if parser.token == token::Eof {
let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument");
6 changes: 3 additions & 3 deletions src/libsyntax_ext/cfg.rs
@@ -7,14 +7,14 @@ use errors::DiagnosticBuilder;
use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::attr;
use syntax::tokenstream;
use syntax::tokenstream::TokenStream;
use syntax::parse::token;
use syntax_pos::Span;

pub fn expand_cfg(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_legacy_ctxt(sp);

@@ -33,7 +33,7 @@ fn parse_cfg<'a>(
fn parse_cfg<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> {
let mut p = cx.new_parser_from_tts(tts);

4 changes: 2 additions & 2 deletions src/libsyntax_ext/compile_error.rs
@@ -2,11 +2,11 @@

use syntax::ext::base::{self, *};
use syntax_pos::Span;
use syntax::tokenstream;
use syntax::tokenstream::TokenStream;

pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree])
tts: TokenStream)
-> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
None => return DummyResult::any(sp),
4 changes: 2 additions & 2 deletions src/libsyntax_ext/concat.rs
@@ -1,14 +1,14 @@
use syntax::ast;
use syntax::ext::base::{self, DummyResult};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax::tokenstream::TokenStream;

use std::string::String;

pub fn expand_syntax_ext(
cx: &mut base::ExtCtxt<'_>,
sp: syntax_pos::Span,
tts: &[tokenstream::TokenTree],
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
