From eac3846b65b068a5cbdfafc786e258554b875dae Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Fri, 24 May 2019 02:04:56 +0300 Subject: [PATCH 01/14] Always use token kinds through `token` module rather than `Token` type --- src/librustc/hir/lowering.rs | 4 +- src/librustc/hir/map/def_collector.rs | 2 +- src/librustc/ich/impls_syntax.rs | 82 ++++++++++----------- src/librustc_resolve/build_reduced_graph.rs | 2 +- src/libsyntax/attr/mod.rs | 22 +++--- src/libsyntax/ext/expand.rs | 4 +- src/libsyntax/ext/tt/macro_parser.rs | 40 +++++----- src/libsyntax/ext/tt/transcribe.rs | 2 +- src/libsyntax/parse/diagnostics.rs | 10 +-- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 8 +- src/libsyntax/parse/token.rs | 4 +- src/libsyntax/tokenstream.rs | 24 +++--- src/libsyntax/util/parser.rs | 50 ++++++------- src/libsyntax_ext/deriving/custom.rs | 4 +- 15 files changed, 130 insertions(+), 130 deletions(-) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index d6ad335525c14..089e5de01a21f 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary; use syntax::std_inject; use syntax::symbol::{kw, sym, Symbol}; use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax::parse::token::Token; +use syntax::parse::token::{self, Token}; use syntax::visit::{self, Visitor}; use syntax_pos::{DUMMY_SP, edition, Span}; @@ -1339,7 +1339,7 @@ impl<'a> LoweringContext<'a> { fn lower_token(&mut self, token: Token, span: Span) -> TokenStream { match token { - Token::Interpolated(nt) => { + token::Interpolated(nt) => { let tts = nt.to_tokenstream(&self.sess.parse_sess, span); self.lower_token_stream(tts) } diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index a4484c8173898..0fa0d1ea00c95 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_token(&mut self, t: Token) { - if let Token::Interpolated(nt) = t { + if let token::Interpolated(nt) = t { if let token::NtExpr(ref expr) = *nt { if let ExprKind::Mac(..) 
= expr.node { self.visit_macro_invoc(expr.id); diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 0cdd9a863ccb8..6e1eba0af56f9 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -313,60 +313,60 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>( ) { mem::discriminant(token).hash_stable(hcx, hasher); match *token { - token::Token::Eq | - token::Token::Lt | - token::Token::Le | - token::Token::EqEq | - token::Token::Ne | - token::Token::Ge | - token::Token::Gt | - token::Token::AndAnd | - token::Token::OrOr | - token::Token::Not | - token::Token::Tilde | - token::Token::At | - token::Token::Dot | - token::Token::DotDot | - token::Token::DotDotDot | - token::Token::DotDotEq | - token::Token::Comma | - token::Token::Semi | - token::Token::Colon | - token::Token::ModSep | - token::Token::RArrow | - token::Token::LArrow | - token::Token::FatArrow | - token::Token::Pound | - token::Token::Dollar | - token::Token::Question | - token::Token::SingleQuote | - token::Token::Whitespace | - token::Token::Comment | - token::Token::Eof => {} - - token::Token::BinOp(bin_op_token) | - token::Token::BinOpEq(bin_op_token) => { + token::Eq | + token::Lt | + token::Le | + token::EqEq | + token::Ne | + token::Ge | + token::Gt | + token::AndAnd | + token::OrOr | + token::Not | + token::Tilde | + token::At | + token::Dot | + token::DotDot | + token::DotDotDot | + token::DotDotEq | + token::Comma | + token::Semi | + token::Colon | + token::ModSep | + token::RArrow | + token::LArrow | + token::FatArrow | + token::Pound | + token::Dollar | + token::Question | + token::SingleQuote | + token::Whitespace | + token::Comment | + token::Eof => {} + + token::BinOp(bin_op_token) | + token::BinOpEq(bin_op_token) => { std_hash::Hash::hash(&bin_op_token, hasher); } - token::Token::OpenDelim(delim_token) | - token::Token::CloseDelim(delim_token) => { + token::OpenDelim(delim_token) | + token::CloseDelim(delim_token) => { std_hash::Hash::hash(&delim_token, hasher); } - token::Token::Literal(lit) => lit.hash_stable(hcx, hasher), + token::Literal(lit) => lit.hash_stable(hcx, hasher), - token::Token::Ident(ident, is_raw) => { + token::Ident(ident, is_raw) => { ident.name.hash_stable(hcx, hasher); is_raw.hash_stable(hcx, hasher); } - token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), + token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), - token::Token::Interpolated(_) => { + token::Interpolated(_) => { bug!("interpolated tokens should not be present in the HIR") } - token::Token::DocComment(val) | - token::Token::Shebang(val) => val.hash_stable(hcx, hasher), + token::DocComment(val) | + token::Shebang(val) => val.hash_stable(hcx, hasher), } } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 92faab192fa9e..76279cc028341 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> { } fn visit_token(&mut self, t: Token) { - if let Token::Interpolated(nt) = t { + if let token::Interpolated(nt) = t { if let token::NtExpr(ref expr) = *nt { if let ast::ExprKind::Mac(..) 
= expr.node { self.visit_invoc(expr.id); diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 48948e4d0d79c..c57510ab1a0be 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -465,7 +465,7 @@ impl MetaItem { let mod_sep_span = Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt()); - idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into()); + idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into()); } idents.push(TokenTree::Token(segment.ident.span, Token::from_ast_ident(segment.ident)).into()); @@ -480,10 +480,10 @@ impl MetaItem { { // FIXME: Share code with `parse_path`. let path = match tokens.next() { - Some(TokenTree::Token(span, token @ Token::Ident(..))) | - Some(TokenTree::Token(span, token @ Token::ModSep)) => 'arm: { - let mut segments = if let Token::Ident(ident, _) = token { - if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() { + Some(TokenTree::Token(span, token @ token::Ident(..))) | + Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: { + let mut segments = if let token::Ident(ident, _) = token { + if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() { tokens.next(); vec![PathSegment::from_ident(ident.with_span_pos(span))] } else { @@ -494,12 +494,12 @@ impl MetaItem { }; loop { if let Some(TokenTree::Token(span, - Token::Ident(ident, _))) = tokens.next() { + token::Ident(ident, _))) = tokens.next() { segments.push(PathSegment::from_ident(ident.with_span_pos(span))); } else { return None; } - if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() { + if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() { tokens.next(); } else { break; @@ -508,7 +508,7 @@ impl MetaItem { let span = span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments } } - Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt { + Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt { token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), token::Nonterminal::NtPath(ref path) => path.clone(), @@ -533,7 +533,7 @@ impl MetaItemKind { match *self { MetaItemKind::Word => TokenStream::empty(), MetaItemKind::NameValue(ref lit) => { - let mut vec = vec![TokenTree::Token(span, Token::Eq).into()]; + let mut vec = vec![TokenTree::Token(span, token::Eq).into()]; lit.tokens().append_to_tree_and_joint_vec(&mut vec); TokenStream::new(vec) } @@ -541,7 +541,7 @@ impl MetaItemKind { let mut tokens = Vec::new(); for (i, item) in list.iter().enumerate() { if i > 0 { - tokens.push(TokenTree::Token(span, Token::Comma).into()); + tokens.push(TokenTree::Token(span, token::Comma).into()); } item.tokens().append_to_tree_and_joint_vec(&mut tokens); } @@ -579,7 +579,7 @@ impl MetaItemKind { let item = NestedMetaItem::from_tokens(&mut tokens)?; result.push(item); match tokens.next() { - None | Some(TokenTree::Token(_, Token::Comma)) => {} + None | Some(TokenTree::Token(_, token::Comma)) => {} _ => return None, } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index c2a73b662c680..7b158b65d1562 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander}; use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; use crate::mut_visit::*; use crate::parse::{DirectoryOwnership, PResult, ParseSess}; -use crate::parse::token::{self, Token}; 
+use crate::parse::token; use crate::parse::parser::Parser; use crate::ptr::P; use crate::symbol::Symbol; @@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } AttrProcMacro(ref mac, ..) => { self.gate_proc_macro_attr_item(attr.span, &item); - let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item { + let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 7b7cf80760f5c..473a5f414dfa8 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -835,12 +835,12 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool { sym::literal => token.can_begin_literal_or_bool(), sym::vis => match *token { // The follow-set of :vis + "priv" keyword + interpolated - Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true, + token::Comma | token::Ident(..) | token::Interpolated(_) => true, _ => token.can_begin_type(), }, sym::block => match *token { - Token::OpenDelim(token::Brace) => true, - Token::Interpolated(ref nt) => match **nt { + token::OpenDelim(token::Brace) => true, + token::Interpolated(ref nt) => match **nt { token::NtItem(_) | token::NtPat(_) | token::NtTy(_) @@ -853,32 +853,32 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool { _ => false, }, sym::path | sym::meta => match *token { - Token::ModSep | Token::Ident(..) => true, - Token::Interpolated(ref nt) => match **nt { + token::ModSep | token::Ident(..) => true, + token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, _ => may_be_ident(&nt), }, _ => false, }, sym::pat => match *token { - Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) - Token::OpenDelim(token::Paren) | // tuple pattern - Token::OpenDelim(token::Bracket) | // slice pattern - Token::BinOp(token::And) | // reference - Token::BinOp(token::Minus) | // negative literal - Token::AndAnd | // double reference - Token::Literal(..) | // literal - Token::DotDot | // range pattern (future compat) - Token::DotDotDot | // range pattern (future compat) - Token::ModSep | // path - Token::Lt | // path (UFCS constant) - Token::BinOp(token::Shl) => true, // path (double UFCS) - Token::Interpolated(ref nt) => may_be_ident(nt), + token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) + token::OpenDelim(token::Paren) | // tuple pattern + token::OpenDelim(token::Bracket) | // slice pattern + token::BinOp(token::And) | // reference + token::BinOp(token::Minus) | // negative literal + token::AndAnd | // double reference + token::Literal(..) 
| // literal + token::DotDot | // range pattern (future compat) + token::DotDotDot | // range pattern (future compat) + token::ModSep | // path + token::Lt | // path (UFCS constant) + token::BinOp(token::Shl) => true, // path (double UFCS) + token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, sym::lifetime => match *token { - Token::Lifetime(_) => true, - Token::Interpolated(ref nt) => match **nt { + token::Lifetime(_) => true, + token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, _ => false, }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e6b49e61937d6..c2a1866b03a1b 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -225,7 +225,7 @@ pub fn transcribe( result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, Token::Interpolated(nt.clone())); + let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); result.push(token.into()); } } else { diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 5df22f28797a4..fc09943d4f5a8 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -294,7 +294,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - let sp = if self.token == token::Token::Eof { + let sp = if self.token == token::Eof { // This is EOF, don't want to point at the following char, but rather the last token self.prev_span } else { @@ -732,7 +732,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_descr(); let (prev_sp, sp) = match (&self.token, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. - (token::Token::Eof, Some(_)) => { + (token::Eof, Some(_)) => { let sp = self.sess.source_map().next_point(self.span); (sp, sp) } @@ -740,14 +740,14 @@ impl<'a> Parser<'a> { // This happens when the parser finds an empty TokenStream. _ if self.prev_span == DUMMY_SP => (self.span, self.span), // EOF, don't want to point at the following char, but rather the last token. 
- (token::Token::Eof, None) => (self.prev_span, self.span), + (token::Eof, None) => (self.prev_span, self.span), _ => (self.sess.source_map().next_point(self.prev_span), self.span), }; let msg = format!( "expected `{}`, found {}", token_str, match (&self.token, self.subparser_name) { - (token::Token::Eof, Some(origin)) => format!("end of {}", origin), + (token::Eof, Some(origin)) => format!("end of {}", origin), _ => this_token_str, }, ); @@ -1215,7 +1215,7 @@ impl<'a> Parser<'a> { crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { let (span, msg) = match (&self.token, self.subparser_name) { - (&token::Token::Eof, Some(origin)) => { + (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.span); (sp, format!("expected expression, found end of {}", origin)) } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index f7a7aba9ecbaa..60d04ae9d942a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -311,7 +311,7 @@ pub fn maybe_file_to_stream( for unmatched in unmatched_braces { let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", - token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + token_to_string(&token::CloseDelim(unmatched.found_delim)), )); db.span_label(unmatched.found_span, "incorrect close delimiter"); if let Some(sp) = unmatched.candidate_span { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 790013f6eb128..8409e300fc9cd 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -3359,7 +3359,7 @@ impl<'a> Parser<'a> { let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { - if self.token == token::Token::Semi { + if self.token == token::Semi { e.span_suggestion_short( match_span, "try removing this `match`", @@ -5920,7 +5920,7 @@ impl<'a> Parser<'a> { while !self.eat(&token::CloseDelim(token::Brace)) { if let token::DocComment(_) = self.token { if self.look_ahead(1, - |tok| tok == &token::Token::CloseDelim(token::Brace)) { + |tok| tok == &token::CloseDelim(token::Brace)) { let mut err = self.diagnostic().struct_span_err_with_code( self.span, "found a documentation comment that doesn't document anything", @@ -6796,7 +6796,7 @@ impl<'a> Parser<'a> { let mut replacement = vec![]; let mut fixed_crate_name = false; // Accept `extern crate name-like-this` for better diagnostics - let dash = token::Token::BinOp(token::BinOpToken::Minus); + let dash = token::BinOp(token::BinOpToken::Minus); if self.token == dash { // Do not include `-` as part of the expected tokens list while self.eat(&dash) { fixed_crate_name = true; @@ -7869,7 +7869,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, handler: for unmatched in unclosed_delims.iter() { let mut err = handler.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", - pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)), )); err.span_label(unmatched.found_span, "incorrect close delimiter"); if let Some(sp) = unmatched.candidate_span { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 47185df8d6165..d54d12698bbb6 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -691,11 +691,11 @@ impl Nonterminal { prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), 
span) } Nonterminal::NtIdent(ident, is_raw) => { - let token = Token::Ident(ident, is_raw); + let token = Ident(ident, is_raw); Some(TokenTree::Token(ident.span, token).into()) } Nonterminal::NtLifetime(ident) => { - let token = Token::Lifetime(ident); + let token = Lifetime(ident); Some(TokenTree::Token(ident.span, token).into()) } Nonterminal::NtTT(ref tt) => { diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 397fb45513c15..5a934cd9f0839 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -167,7 +167,7 @@ impl TokenTree { /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s. /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s /// instead of a representation of the abstract syntax tree. -/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat. +/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat. /// /// The use of `Option` is an optimization that avoids the need for an /// allocation when the stream is empty. However, it is not guaranteed that an @@ -201,7 +201,7 @@ impl TokenStream { while let Some((pos, ts)) = iter.next() { if let Some((_, next)) = iter.peek() { let sp = match (&ts, &next) { - (_, (TokenTree::Token(_, token::Token::Comma), _)) => continue, + (_, (TokenTree::Token(_, token::Comma), _)) => continue, ((TokenTree::Token(sp, token_left), NonJoint), (TokenTree::Token(_, token_right), _)) if ((token_left.is_ident() && !token_left.is_reserved_ident()) @@ -352,17 +352,17 @@ impl TokenStream { match tree { // The pretty printer tends to add trailing commas to // everything, and in particular, after struct fields. - | TokenTree::Token(_, Token::Comma) + | TokenTree::Token(_, token::Comma) // The pretty printer emits `NoDelim` as whitespace. - | TokenTree::Token(_, Token::OpenDelim(DelimToken::NoDelim)) - | TokenTree::Token(_, Token::CloseDelim(DelimToken::NoDelim)) + | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim)) + | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim)) // The pretty printer collapses many semicolons into one. - | TokenTree::Token(_, Token::Semi) + | TokenTree::Token(_, token::Semi) // The pretty printer collapses whitespace arbitrarily and can // introduce whitespace from `NoDelim`. 
- | TokenTree::Token(_, Token::Whitespace) + | TokenTree::Token(_, token::Whitespace) // The pretty printer can turn `$crate` into `::crate_name` - | TokenTree::Token(_, Token::ModSep) => false, + | TokenTree::Token(_, token::ModSep) => false, _ => true } } @@ -664,7 +664,7 @@ mod tests { with_default_globals(|| { let test0: TokenStream = Vec::::new().into_iter().collect(); let test1: TokenStream = - TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into(); + TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into(); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); @@ -677,9 +677,9 @@ mod tests { fn test_dotdotdot() { with_default_globals(|| { let mut builder = TokenStreamBuilder::new(); - builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint()); - builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint()); - builder.push(TokenTree::Token(sp(2, 3), Token::Dot)); + builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint()); + builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint()); + builder.push(TokenTree::Token(sp(2, 3), token::Dot)); let stream = builder.build(); assert!(stream.eq_unspanned(&string_to_ts("..."))); assert_eq!(stream.trees().count(), 1); diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs index 7e306d59e35ce..8dc9ce39915ad 100644 --- a/src/libsyntax/util/parser.rs +++ b/src/libsyntax/util/parser.rs @@ -1,4 +1,4 @@ -use crate::parse::token::{Token, BinOpToken}; +use crate::parse::token::{self, Token, BinOpToken}; use crate::symbol::kw; use crate::ast::{self, BinOpKind}; @@ -72,31 +72,31 @@ impl AssocOp { pub fn from_token(t: &Token) -> Option { use AssocOp::*; match *t { - Token::BinOpEq(k) => Some(AssignOp(k)), - Token::Eq => Some(Assign), - Token::BinOp(BinOpToken::Star) => Some(Multiply), - Token::BinOp(BinOpToken::Slash) => Some(Divide), - Token::BinOp(BinOpToken::Percent) => Some(Modulus), - Token::BinOp(BinOpToken::Plus) => Some(Add), - Token::BinOp(BinOpToken::Minus) => Some(Subtract), - Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft), - Token::BinOp(BinOpToken::Shr) => Some(ShiftRight), - Token::BinOp(BinOpToken::And) => Some(BitAnd), - Token::BinOp(BinOpToken::Caret) => Some(BitXor), - Token::BinOp(BinOpToken::Or) => Some(BitOr), - Token::Lt => Some(Less), - Token::Le => Some(LessEqual), - Token::Ge => Some(GreaterEqual), - Token::Gt => Some(Greater), - Token::EqEq => Some(Equal), - Token::Ne => Some(NotEqual), - Token::AndAnd => Some(LAnd), - Token::OrOr => Some(LOr), - Token::DotDot => Some(DotDot), - Token::DotDotEq => Some(DotDotEq), + token::BinOpEq(k) => Some(AssignOp(k)), + token::Eq => Some(Assign), + token::BinOp(BinOpToken::Star) => Some(Multiply), + token::BinOp(BinOpToken::Slash) => Some(Divide), + token::BinOp(BinOpToken::Percent) => Some(Modulus), + token::BinOp(BinOpToken::Plus) => Some(Add), + token::BinOp(BinOpToken::Minus) => Some(Subtract), + token::BinOp(BinOpToken::Shl) => Some(ShiftLeft), + token::BinOp(BinOpToken::Shr) => Some(ShiftRight), + token::BinOp(BinOpToken::And) => Some(BitAnd), + token::BinOp(BinOpToken::Caret) => Some(BitXor), + token::BinOp(BinOpToken::Or) => Some(BitOr), + token::Lt => Some(Less), + token::Le => Some(LessEqual), + token::Ge => Some(GreaterEqual), + token::Gt => Some(Greater), + token::EqEq => Some(Equal), + token::Ne => Some(NotEqual), + token::AndAnd => Some(LAnd), + token::OrOr => Some(LOr), + token::DotDot => Some(DotDot), + token::DotDotEq => Some(DotDotEq), // DotDotDot is no longer supported, but we need 
some way to display the error - Token::DotDotDot => Some(DotDotEq), - Token::Colon => Some(Colon), + token::DotDotDot => Some(DotDotEq), + token::Colon => Some(Colon), _ if t.is_keyword(kw::As) => Some(As), _ => None } diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index 975d96951dc55..c1d93805a5811 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -8,7 +8,7 @@ use syntax::attr::{mark_used, mark_known}; use syntax::source_map::Span; use syntax::ext::base::*; use syntax::parse; -use syntax::parse::token::{self, Token}; +use syntax::parse::token; use syntax::tokenstream; use syntax::visit::Visitor; use syntax_pos::DUMMY_SP; @@ -68,7 +68,7 @@ impl MultiItemModifier for ProcMacroDerive { // Mark attributes as known, and used. MarkAttrs(&self.attrs).visit_item(&item); - let token = Token::Interpolated(Lrc::new(token::NtItem(item))); + let token = token::Interpolated(Lrc::new(token::NtItem(item))); let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into(); let server = proc_macro_server::Rustc::new(ecx); From 99b27d749c22117eccf862f5ee4eb540b65b681f Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 4 Jun 2019 17:55:23 +0300 Subject: [PATCH 02/14] syntax: Rename `Token` into `TokenKind` --- src/librustc/hir/lowering.rs | 4 +- src/librustc/hir/map/def_collector.rs | 4 +- src/librustc/ich/impls_syntax.rs | 2 +- src/librustc_resolve/build_reduced_graph.rs | 4 +- src/librustc_save_analysis/span_utils.rs | 4 +- src/libsyntax/attr/mod.rs | 4 +- src/libsyntax/ext/tt/macro_parser.rs | 16 +++---- src/libsyntax/ext/tt/macro_rules.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 18 ++++---- src/libsyntax/ext/tt/transcribe.rs | 6 +-- src/libsyntax/mut_visit.rs | 6 +-- src/libsyntax/parse/diagnostics.rs | 12 +++--- src/libsyntax/parse/lexer/mod.rs | 32 +++++++------- src/libsyntax/parse/lexer/tokentrees.rs | 2 +- src/libsyntax/parse/literal.rs | 6 +-- src/libsyntax/parse/mod.rs | 4 +- src/libsyntax/parse/parser.rs | 48 ++++++++++----------- src/libsyntax/parse/token.rs | 28 ++++++------ src/libsyntax/print/pprust.rs | 4 +- src/libsyntax/tokenstream.rs | 14 +++--- src/libsyntax/util/parser.rs | 4 +- src/libsyntax/visit.rs | 4 +- src/libsyntax_ext/assert.rs | 4 +- src/libsyntax_ext/proc_macro_server.rs | 6 +-- 24 files changed, 119 insertions(+), 119 deletions(-) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 089e5de01a21f..919f682fc4f6f 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary; use syntax::std_inject; use syntax::symbol::{kw, sym, Symbol}; use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax::parse::token::{self, Token}; +use syntax::parse::token::{self, TokenKind}; use syntax::visit::{self, Visitor}; use syntax_pos::{DUMMY_SP, edition, Span}; @@ -1337,7 +1337,7 @@ impl<'a> LoweringContext<'a> { } } - fn lower_token(&mut self, token: Token, span: Span) -> TokenStream { + fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream { match token { token::Interpolated(nt) => { let tts = nt.to_tokenstream(&self.sess.parse_sess, span); diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 0fa0d1ea00c95..b9a80ebb78f20 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -6,7 +6,7 @@ use syntax::ast::*; use syntax::ext::hygiene::Mark; use syntax::visit; use 
syntax::symbol::{kw, sym}; -use syntax::parse::token::{self, Token}; +use syntax::parse::token::{self, TokenKind}; use syntax_pos::Span; /// Creates `DefId`s for nodes in the AST. @@ -325,7 +325,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } } - fn visit_token(&mut self, t: Token) { + fn visit_token(&mut self, t: TokenKind) { if let token::Interpolated(nt) = t { if let token::NtExpr(ref expr) = *nt { if let ExprKind::Mac(..) = expr.node { diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 6e1eba0af56f9..8e2550d3c4537 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -307,7 +307,7 @@ impl_stable_hash_for!(struct token::Lit { }); fn hash_token<'a, 'gcx, W: StableHasherResult>( - token: &token::Token, + token: &token::TokenKind, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher, ) { diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 76279cc028341..a7a78a69952f4 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -34,7 +34,7 @@ use syntax::ext::base::Determinacy::Undetermined; use syntax::ext::hygiene::Mark; use syntax::ext::tt::macro_rules; use syntax::feature_gate::is_builtin_attr; -use syntax::parse::token::{self, Token}; +use syntax::parse::token::{self, TokenKind}; use syntax::span_err; use syntax::std_inject::injected_crate_name; use syntax::symbol::{kw, sym}; @@ -1052,7 +1052,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> { self.resolver.current_module = parent; } - fn visit_token(&mut self, t: Token) { + fn visit_token(&mut self, t: TokenKind) { if let token::Interpolated(nt) = t { if let token::NtExpr(ref expr) = *nt { if let ast::ExprKind::Mac(..) 
= expr.node { diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index e2c93b6d33158..5527fcb923b6f 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -5,7 +5,7 @@ use crate::generated_code; use std::cell::Cell; use syntax::parse::lexer::{self, StringReader}; -use syntax::parse::token::{self, Token}; +use syntax::parse::token::{self, TokenKind}; use syntax_pos::*; #[derive(Clone)] @@ -56,7 +56,7 @@ impl<'a> SpanUtils<'a> { lexer::StringReader::retokenize(&self.sess.parse_sess, span) } - pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option { + pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option { let mut toks = self.retokenise_span(span); loop { let next = toks.real_token(); diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index c57510ab1a0be..ade15f024a609 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::parser::Parser; use crate::parse::{self, ParseSess, PResult}; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, TokenKind}; use crate::ptr::P; use crate::symbol::{sym, Symbol}; use crate::ThinVec; @@ -468,7 +468,7 @@ impl MetaItem { idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into()); } idents.push(TokenTree::Token(segment.ident.span, - Token::from_ast_ident(segment.ident)).into()); + TokenKind::from_ast_ident(segment.ident)).into()); last_pos = segment.ident.span.hi(); } self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 473a5f414dfa8..c22952ed7504b 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -78,7 +78,7 @@ use crate::ast::Ident; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; -use crate::parse::token::{self, DocComment, Nonterminal, Token}; +use crate::parse::token::{self, DocComment, Nonterminal, TokenKind}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{DelimSpan, TokenStream}; @@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> { seq_op: Option, /// The separator if we are in a repetition. - sep: Option, + sep: Option, /// The "parent" matcher position if we are in a repetition. That is, the matcher position just /// before we enter the sequence. @@ -273,7 +273,7 @@ pub enum ParseResult { Success(T), /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected /// end of macro invocation. Otherwise, it indicates that no rules expected the given token. - Failure(syntax_pos::Span, Token, &'static str), + Failure(syntax_pos::Span, TokenKind, &'static str), /// Fatal error (malformed macro?). Abort compilation. Error(syntax_pos::Span, String), } @@ -417,7 +417,7 @@ fn nameize>( /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For /// other tokens, this is "unexpected token...". 
-pub fn parse_failure_msg(tok: Token) -> String { +pub fn parse_failure_msg(tok: TokenKind) -> String { match tok { token::Eof => "unexpected end of macro invocation".to_string(), _ => format!( @@ -428,7 +428,7 @@ pub fn parse_failure_msg(tok: Token) -> String { } /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) -fn token_name_eq(t1: &Token, t2: &Token) -> bool { +fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool { if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { id1.name == id2.name && is_raw1 == is_raw2 } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) { @@ -466,7 +466,7 @@ fn inner_parse_loop<'root, 'tt>( next_items: &mut Vec>, eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, - token: &Token, + token: &TokenKind, span: syntax_pos::Span, ) -> ParseResult<()> { // Pop items from `cur_items` until it is empty. @@ -807,7 +807,7 @@ pub fn parse( /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. -fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { +fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> { match *token { token::Ident(ident, is_raw) if ident.name != kw::Underscore => Some((ident, is_raw)), @@ -819,7 +819,7 @@ fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { /// /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that /// token. Be conservative (return true) if not sure. -fn may_begin_with(name: Symbol, token: &Token) -> bool { +fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { /// Checks whether the non-terminal may contain a single (non-keyword) identifier. fn may_be_ident(nt: &token::Nonterminal) -> bool { match *nt { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 285c88357a6a8..9d3ea4d8645da 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -12,7 +12,7 @@ use crate::feature_gate::Features; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::Parser; use crate::parse::token::{self, NtTT}; -use crate::parse::token::Token::*; +use crate::parse::token::TokenKind::*; use crate::symbol::{Symbol, kw, sym}; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree}; diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index a029c65465952..fe0cb56b29e30 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -23,12 +23,12 @@ pub struct Delimited { impl Delimited { /// Returns the opening delimiter (possibly `NoDelim`). - pub fn open_token(&self) -> token::Token { + pub fn open_token(&self) -> token::TokenKind { token::OpenDelim(self.delim) } /// Returns the closing delimiter (possibly `NoDelim`). - pub fn close_token(&self) -> token::Token { + pub fn close_token(&self) -> token::TokenKind { token::CloseDelim(self.delim) } @@ -58,7 +58,7 @@ pub struct SequenceRepetition { /// The sequence of token trees pub tts: Vec, /// The optional separator - pub separator: Option, + pub separator: Option, /// Whether the sequence can be repeated zero (*), or one or more times (+) pub op: KleeneOp, /// The number of `Match`s that appear in the sequence (and subsequences) @@ -81,7 +81,7 @@ pub enum KleeneOp { /// are "first-class" token trees. Useful for parsing macros. 
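The matcher structures defined in this file map one-to-one onto `macro_rules!` surface syntax: a `Sequence` is `$( ... )`, its `separator` is the token written before the Kleene operator, and `op` is the `*`, `+`, or `?` itself. A runnable illustration in ordinary stable Rust (independent of this crate): the matcher `$( $e:expr ),+` is a sequence holding one metavariable declaration, with separator `Comma` and `KleeneOp::OneOrMore`.

// `$( $e:expr ),+` corresponds to a `Sequence` whose `tts` hold one
// `MetaVarDecl`, with `separator: Some(Comma)` and `op: KleeneOp::OneOrMore`.
macro_rules! count_exprs {
    ( $( $e:expr ),+ ) => { [$( $e ),+].len() };
}

fn main() {
    assert_eq!(count_exprs!(1, 2, 3), 3);
}
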
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { - Token(Span, token::Token), + Token(Span, token::TokenKind), Delimited(DelimSpan, Lrc), /// A kleene-style repetition sequence Sequence(DelimSpan, Lrc), @@ -366,7 +366,7 @@ where /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return /// `None`. -fn kleene_op(token: &token::Token) -> Option { +fn kleene_op(token: &token::TokenKind) -> Option { match *token { token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore), token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore), @@ -383,7 +383,7 @@ fn kleene_op(token: &token::Token) -> Option { fn parse_kleene_op( input: &mut I, span: Span, -) -> Result, Span> +) -> Result, Span> where I: Iterator, { @@ -422,7 +422,7 @@ fn parse_sep_and_kleene_op( attrs: &[ast::Attribute], edition: Edition, macro_node_id: NodeId, -) -> (Option, KleeneOp) +) -> (Option, KleeneOp) where I: Iterator, { @@ -447,7 +447,7 @@ fn parse_sep_and_kleene_op_2015( _features: &Features, _attrs: &[ast::Attribute], macro_node_id: NodeId, -) -> (Option, KleeneOp) +) -> (Option, KleeneOp) where I: Iterator, { @@ -565,7 +565,7 @@ fn parse_sep_and_kleene_op_2018( sess: &ParseSess, _features: &Features, _attrs: &[ast::Attribute], -) -> (Option, KleeneOp) +) -> (Option, KleeneOp) where I: Iterator, { diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index c2a1866b03a1b..1b169d7696af3 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -4,7 +4,7 @@ use crate::ext::expand::Marker; use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch}; use crate::ext::tt::quoted; use crate::mut_visit::noop_visit_tt; -use crate::parse::token::{self, NtTT, Token}; +use crate::parse::token::{self, NtTT, TokenKind}; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use smallvec::{smallvec, SmallVec}; @@ -18,7 +18,7 @@ use std::rc::Rc; /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). 
enum Frame { Delimited { forest: Lrc, idx: usize, span: DelimSpan }, - Sequence { forest: Lrc, idx: usize, sep: Option }, + Sequence { forest: Lrc, idx: usize, sep: Option }, } impl Frame { @@ -242,7 +242,7 @@ pub fn transcribe( Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark)); sp = sp.apply_mark(cx.current_expansion.mark); result.push(TokenTree::Token(sp, token::Dollar).into()); - result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into()); + result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into()); } } diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index fb1a7a680baaf..289f2c0ce4864 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -9,7 +9,7 @@ use crate::ast::*; use crate::source_map::{Spanned, respan}; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, TokenKind}; use crate::ptr::P; use crate::ThinVec; use crate::tokenstream::*; @@ -262,7 +262,7 @@ pub trait MutVisitor: Sized { noop_visit_tts(tts, self); } - fn visit_token(&mut self, t: &mut Token) { + fn visit_token(&mut self, t: &mut TokenKind) { noop_visit_token(t, self); } @@ -596,7 +596,7 @@ pub fn noop_visit_tts(TokenStream(tts): &mut TokenStream, vis: &m } // apply ident visitor if it's an ident, apply other visits to interpolated nodes -pub fn noop_visit_token(t: &mut Token, vis: &mut T) { +pub fn noop_visit_token(t: &mut TokenKind, vis: &mut T) { match t { token::Ident(id, _is_raw) => vis.visit_ident(id), token::Lifetime(id) => vis.visit_ident(id), diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index fc09943d4f5a8..b391f7ca327e8 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -229,8 +229,8 @@ impl<'a> Parser<'a> { pub fn expected_one_of_not_found( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[token::TokenKind], + inedible: &[token::TokenKind], ) -> PResult<'a, bool /* recovered */> { fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); @@ -368,7 +368,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. - crate fn eat_to_tokens(&mut self, kets: &[&token::Token]) { + crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) { let handler = self.diagnostic(); if let Err(ref mut err) = self.parse_seq_to_before_tokens( @@ -388,7 +388,7 @@ impl<'a> Parser<'a> { /// let _ = vec![1, 2, 3].into_iter().collect::>>>(); /// ^^ help: remove extra angle brackets /// ``` - crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) { + crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) { // This function is intended to be invoked after parsing a path segment where there are two // cases: // @@ -726,7 +726,7 @@ impl<'a> Parser<'a> { /// closing delimiter. 
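The inner `tokens_to_string` helper of `expected_one_of_not_found` (in the diagnostics hunk above) renders the expectation list as prose. A self-contained sketch of that kind of formatting, simplified to `&str` tokens rather than `TokenType` and not rustc's exact output:

// Render "one of `a`, `b` or `c`" from a list of expected tokens.
fn tokens_to_string(tokens: &[&str]) -> String {
    match tokens {
        [] => String::new(),
        [only] => format!("`{}`", only),
        [init @ .., last] => format!(
            "one of {} or `{}`",
            init.iter().map(|t| format!("`{}`", t)).collect::<Vec<_>>().join(", "),
            last
        ),
    }
}

fn main() {
    assert_eq!(tokens_to_string(&[",", ";", "}"]), "one of `,`, `;` or `}`");
}
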
pub fn unexpected_try_recover( &mut self, - t: &token::Token, + t: &token::TokenKind, ) -> PResult<'a, bool /* recovered */> { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); @@ -903,7 +903,7 @@ impl<'a> Parser<'a> { crate fn recover_closing_delimiter( &mut self, - tokens: &[token::Token], + tokens: &[token::TokenKind], mut err: DiagnosticBuilder<'a>, ) -> PResult<'a, bool> { let mut pos = None; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a06a84f162a96..ca9199975bb7e 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1,6 +1,6 @@ use crate::ast::{self, Ident}; use crate::parse::ParseSess; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, TokenKind}; use crate::symbol::{sym, Symbol}; use crate::parse::unescape; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; @@ -22,7 +22,7 @@ mod unicode_chars; #[derive(Clone, Debug)] pub struct TokenAndSpan { - pub tok: Token, + pub tok: TokenKind, pub sp: Span, } @@ -56,7 +56,7 @@ pub struct StringReader<'a> { /// Stop reading src at this index. crate end_src_index: usize, // cached: - peek_tok: Token, + peek_tok: TokenKind, peek_span: Span, peek_span_src_raw: Span, fatal_errs: Vec>, @@ -847,7 +847,7 @@ impl<'a> StringReader<'a> { } } - fn binop(&mut self, op: token::BinOpToken) -> Token { + fn binop(&mut self, op: token::BinOpToken) -> TokenKind { self.bump(); if self.ch_is('=') { self.bump(); @@ -859,7 +859,7 @@ impl<'a> StringReader<'a> { /// Returns the next token from the string, advances the input past that /// token, and updates the interner - fn next_token_inner(&mut self) -> Result { + fn next_token_inner(&mut self) -> Result { let c = self.ch; if ident_start(c) { @@ -916,7 +916,7 @@ impl<'a> StringReader<'a> { let (kind, symbol) = self.scan_number(c.unwrap()); let suffix = self.scan_optional_raw_name(); debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix); - return Ok(Token::lit(kind, symbol, suffix)); + return Ok(TokenKind::lit(kind, symbol, suffix)); } match c.expect("next_token_inner called at EOF") { @@ -1077,7 +1077,7 @@ impl<'a> StringReader<'a> { let symbol = self.name_from(start); self.bump(); self.validate_char_escape(start_with_quote); - return Ok(Token::lit(token::Char, symbol, None)); + return Ok(TokenKind::lit(token::Char, symbol, None)); } // Include the leading `'` in the real identifier, for macro @@ -1102,7 +1102,7 @@ impl<'a> StringReader<'a> { let symbol = self.scan_single_quoted_string(start_with_quote, msg); self.validate_char_escape(start_with_quote); let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::Char, symbol, suffix)) + Ok(TokenKind::lit(token::Char, symbol, suffix)) } 'b' => { self.bump(); @@ -1127,7 +1127,7 @@ impl<'a> StringReader<'a> { }; let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(kind, symbol, suffix)) + Ok(TokenKind::lit(kind, symbol, suffix)) } '"' => { let start_with_quote = self.pos; @@ -1135,7 +1135,7 @@ impl<'a> StringReader<'a> { let symbol = self.scan_double_quoted_string(msg); self.validate_str_escape(start_with_quote); let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::Str, symbol, suffix)) + Ok(TokenKind::lit(token::Str, symbol, suffix)) } 'r' => { let start_bpos = self.pos; @@ -1213,7 +1213,7 @@ impl<'a> StringReader<'a> { }; let suffix = self.scan_optional_raw_name(); - Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix)) + 
Ok(TokenKind::lit(token::StrRaw(hash_count), symbol, suffix)) } '-' => { if self.nextch_is('>') { @@ -1638,19 +1638,19 @@ mod tests { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec) { + fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec) { for expected_tok in &expected { assert_eq!(&string_reader.next_token().tok, expected_tok); } } // make the identifier by looking up the string in the interner - fn mk_ident(id: &str) -> Token { - Token::from_ast_ident(Ident::from_str(id)) + fn mk_ident(id: &str) -> TokenKind { + TokenKind::from_ast_ident(Ident::from_str(id)) } - fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token { - Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern)) + fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind { + TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern)) } #[test] diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 4bfc5bb16c0bb..b8cd32883b88c 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -23,7 +23,7 @@ impl<'a> StringReader<'a> { struct TokenTreesReader<'a> { string_reader: StringReader<'a>, - token: token::Token, + token: token::TokenKind, span: Span, /// Stack of open delimiters and their spans. Used for error message. open_braces: Vec<(token::DelimToken, Span)>, diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 18019a89130e7..945475ff9818b 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -3,7 +3,7 @@ use crate::ast::{self, Ident, Lit, LitKind}; use crate::parse::parser::Parser; use crate::parse::PResult; -use crate::parse::token::{self, Token}; +use crate::parse::token::{self, TokenKind}; use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; @@ -228,7 +228,7 @@ impl Lit { } /// Converts arbitrary token into an AST literal. - crate fn from_token(token: &Token, span: Span) -> Result { + crate fn from_token(token: &TokenKind, span: Span) -> Result { let lit = match *token { token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False => token::Lit::new(token::Bool, ident.name, None), @@ -276,7 +276,7 @@ impl<'a> Parser<'a> { let next_span = self.look_ahead_span(1); if self.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); - let token = Token::lit(token::Float, Symbol::intern(&s), suffix); + let token = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); return Some((token, self.span.to(next_span))); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 60d04ae9d942a..7f8b96508bdd9 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -358,13 +358,13 @@ pub fn stream_to_parser_with_base_dir<'a>( /// A sequence separator. pub struct SeqSep { /// The seperator token. - pub sep: Option, + pub sep: Option, /// `true` if a trailing separator is allowed. 
pub trailing_sep_allowed: bool, } impl SeqSep { - pub fn trailing_allowed(t: token::Token) -> SeqSep { + pub fn trailing_allowed(t: token::TokenKind) -> SeqSep { SeqSep { sep: Some(t), trailing_sep_allowed: true, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8409e300fc9cd..8fc02dd9259e0 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -196,9 +196,9 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current token. - pub token: token::Token, - /// The span of the current token. + /// the current token: + pub token: token::TokenKind, + /// the span of the current token: pub span: Span, meta_var_span: Option, /// The span of the previous token. @@ -355,7 +355,7 @@ impl TokenCursor { [ TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::Token::lit( + TokenTree::Token(sp, token::TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None )), ] @@ -380,7 +380,7 @@ impl TokenCursor { #[derive(Clone, PartialEq)] crate enum TokenType { - Token(token::Token), + Token(token::TokenKind), Keyword(Symbol), Operator, Lifetime, @@ -410,7 +410,7 @@ impl TokenType { /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that `IDENT` is not the ident of a fn trait. -fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { +fn can_continue_type_after_non_fn_ident(t: &token::TokenKind) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } @@ -559,7 +559,7 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &token::TokenKind) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); @@ -577,8 +577,8 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. pub fn expect_one_of( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[token::TokenKind], + inedible: &[token::TokenKind], ) -> PResult<'a, bool /* recovered */> { if edible.contains(&self.token) { self.bump(); @@ -640,14 +640,14 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. - crate fn check(&mut self, tok: &token::Token) -> bool { + crate fn check(&mut self, tok: &token::TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consumes a token 'tok' if it exists. Returns whether the given token was present. - pub fn eat(&mut self, tok: &token::Token) -> bool { + pub fn eat(&mut self, tok: &token::TokenKind) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present @@ -883,7 +883,7 @@ impl<'a> Parser<'a> { /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_end(&mut self, - ket: &token::Token, + ket: &token::TokenKind, sep: SeqSep, f: F) -> PResult<'a, Vec> where @@ -901,7 +901,7 @@ impl<'a> Parser<'a> { /// closing bracket. 
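How `SeqSep` drives the `parse_seq_*` family: the separator is required between items, and `trailing_sep_allowed` decides whether one extra separator may sit directly before the closing token. A hedged, self-contained model of that loop (not rustc's code; real call sites pass a closure `f` that parses each item):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Ident, Comma, CloseParen }

struct SeqSep { sep: Option<Tok>, trailing_sep_allowed: bool }

fn parse_seq_to_end(toks: &[Tok], sep: &SeqSep, ket: Tok) -> Result<usize, &'static str> {
    let (mut i, mut n, mut just_ate_sep) = (0, 0, false);
    loop {
        if toks.get(i) == Some(&ket) {
            // A separator directly before `ket` is the "trailing" case.
            return if just_ate_sep && !sep.trailing_sep_allowed {
                Err("trailing separator not allowed")
            } else {
                Ok(n)
            };
        }
        match toks.get(i) {
            Some(Tok::Ident) => { n += 1; i += 1; } // the `f` callback's job
            _ => return Err("expected item"),
        }
        just_ate_sep = false;
        if let Some(s) = sep.sep {
            if toks.get(i) == Some(&s) {
                i += 1;
                just_ate_sep = true;
            } else if toks.get(i) != Some(&ket) {
                return Err("expected separator");
            }
        }
    }
}

fn main() {
    use Tok::*;
    let sep = SeqSep { sep: Some(Comma), trailing_sep_allowed: true };
    // Models `a, b,)`; the trailing comma is accepted.
    let toks = [Ident, Comma, Ident, Comma, CloseParen];
    assert_eq!(parse_seq_to_end(&toks, &sep, CloseParen), Ok(2));
}
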
pub fn parse_seq_to_before_end( &mut self, - ket: &token::Token, + ket: &token::TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, (Vec, bool)> @@ -912,7 +912,7 @@ impl<'a> Parser<'a> { crate fn parse_seq_to_before_tokens( &mut self, - kets: &[&token::Token], + kets: &[&token::TokenKind], sep: SeqSep, expect: TokenExpectType, mut f: F, @@ -986,8 +986,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq( &mut self, - bra: &token::Token, - ket: &token::Token, + bra: &token::TokenKind, + ket: &token::TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, Vec> where @@ -1032,7 +1032,7 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - fn bump_with(&mut self, next: token::Token, span: Span) { + fn bump_with(&mut self, next: token::TokenKind, span: Span) { self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the @@ -1044,7 +1044,7 @@ impl<'a> Parser<'a> { } pub fn look_ahead(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::Token) -> R, + F: FnOnce(&token::TokenKind) -> R, { if dist == 0 { return f(&self.token) @@ -1763,7 +1763,7 @@ impl<'a> Parser<'a> { fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> { let ident = self.parse_path_segment_ident()?; - let is_args_start = |token: &token::Token| match *token { + let is_args_start = |token: &token::TokenKind| match *token { token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) | token::LArrow => true, _ => false, @@ -1992,7 +1992,7 @@ impl<'a> Parser<'a> { let ex: ExprKind; - // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr(). + // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr(). match self.token { token::OpenDelim(token::Paren) => { self.bump(); @@ -2706,7 +2706,7 @@ impl<'a> Parser<'a> { -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span; - // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() + // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() let (hi, ex) = match self.token { token::Not => { self.bump(); @@ -2760,7 +2760,7 @@ impl<'a> Parser<'a> { // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::Token| match *t { + let token_cannot_continue_expr = |t: &token::TokenKind| match *t { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), @@ -4779,7 +4779,7 @@ impl<'a> Parser<'a> { let mut last_plus_span = None; let mut was_negative = false; loop { - // This needs to be synchronized with `Token::can_begin_bound`. + // This needs to be synchronized with `TokenKind::can_begin_bound`. let is_bound_start = self.check_path() || self.check_lifetime() || self.check(&token::Not) || // used for error reporting only self.check(&token::Question) || @@ -6413,7 +6413,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parses all of the items in a module. 
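Both patches lean on one mechanism, visible in the `token.rs` hunk just below: `pub use TokenKind::*;` glob-re-exports every variant, so `token::Eof` and `TokenKind::Eof` (formerly `Token::Eof`) name the same variant, and call sites can uniformly write `token::...`. A minimal standalone model of that re-export pattern (hypothetical names, not the rustc source):

mod token {
    pub use self::TokenKind::*; // like libsyntax's `pub use TokenKind::*;`

    #[derive(Debug, PartialEq)]
    pub enum TokenKind {
        Eq,
        Eof,
    }
}

// Variants are reachable through the module, not just the type,
// which is exactly the style patch 01 switches every use site to.
fn is_eof(t: &token::TokenKind) -> bool {
    *t == token::Eof
}

fn main() {
    assert!(is_eof(&token::Eof));
    assert!(!is_eof(&token::Eq));
}
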
- fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &token::TokenKind, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? { items.push(item); diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index d54d12698bbb6..aa1e8fd060f78 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -2,7 +2,7 @@ pub use BinOpToken::*; pub use Nonterminal::*; pub use DelimToken::*; pub use LitKind::*; -pub use Token::*; +pub use TokenKind::*; use crate::ast::{self}; use crate::parse::ParseSess; @@ -118,7 +118,7 @@ impl Lit { } pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool { - let ident_token: Token = Ident(ident, is_raw); + let ident_token: TokenKind = Ident(ident, is_raw); !ident_token.is_reserved_ident() || ident_token.is_path_segment_keyword() || @@ -149,7 +149,7 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool { } fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool { - let ident_token: Token = Ident(ident, is_raw); + let ident_token: TokenKind = Ident(ident, is_raw); !ident_token.is_reserved_ident() || ident_token.is_path_segment_keyword() || @@ -166,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool { } #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] -pub enum Token { +pub enum TokenKind { /* Expression-operator symbols. */ Eq, Lt, @@ -231,13 +231,13 @@ pub enum Token { Eof, } -// `Token` is used a lot. Make sure it doesn't unintentionally get bigger. +// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(target_arch = "x86_64")] -static_assert_size!(Token, 16); +static_assert_size!(TokenKind, 16); -impl Token { - /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary. - pub fn from_ast_ident(ident: ast::Ident) -> Token { +impl TokenKind { + /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary. + pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { Ident(ident, ident.is_raw_guess()) } @@ -323,7 +323,7 @@ impl Token { self == &Question || self == &OpenDelim(Paren) } - pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> Token { + pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> TokenKind { Literal(Lit::new(kind, symbol, suffix)) } @@ -468,7 +468,7 @@ impl Token { } } - crate fn glue(self, joint: Token) -> Option { + crate fn glue(self, joint: TokenKind) -> Option { Some(match self { Eq => match joint { Eq => EqEq, @@ -534,7 +534,7 @@ impl Token { /// Returns tokens that are likely to be typed accidentally instead of the current token. /// Enables better error recovery when the wrong token is found. - crate fn similar_tokens(&self) -> Option> { + crate fn similar_tokens(&self) -> Option> { match *self { Comma => Some(vec![Dot, Lt, Semi]), Semi => Some(vec![Colon, Comma]), @@ -544,7 +544,7 @@ impl Token { // See comments in `Nonterminal::to_tokenstream` for why we care about // *probably* equal here rather than actual equality - crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool { + crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool { if mem::discriminant(self) != mem::discriminant(other) { return false } @@ -743,7 +743,7 @@ impl Nonterminal { } } -crate fn is_op(tok: &Token) -> bool { +crate fn is_op(tok: &TokenKind) -> bool { match *tok { OpenDelim(..) 
| CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..) | Lifetime(..) | Interpolated(..) | diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 57c01e9e3efea..cd7106191bee2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg}; use crate::util::parser::{self, AssocOp, Fixity}; use crate::attr; use crate::source_map::{self, SourceMap, Spanned}; -use crate::parse::token::{self, BinOpToken, Nonterminal, Token}; +use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind}; use crate::parse::lexer::comments; use crate::parse::{self, ParseSess}; use crate::print::pp::{self, Breaks}; @@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String { out } -pub fn token_to_string(tok: &Token) -> String { +pub fn token_to_string(tok: &TokenKind) -> String { match *tok { token::Eq => "=".to_string(), token::Lt => "<".to_string(), diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 5a934cd9f0839..0f50f51f5d35c 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -16,7 +16,7 @@ use crate::ext::base; use crate::ext::tt::{macro_parser, quoted}; use crate::parse::Directory; -use crate::parse::token::{self, DelimToken, Token}; +use crate::parse::token::{self, DelimToken, TokenKind}; use crate::print::pprust; use syntax_pos::{BytePos, Mark, Span, DUMMY_SP}; @@ -44,7 +44,7 @@ use std::{fmt, iter, mem}; #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { /// A single token - Token(Span, token::Token), + Token(Span, token::TokenKind), /// A delimited sequence of token trees Delimited(DelimSpan, DelimToken, TokenStream), } @@ -54,7 +54,7 @@ pub enum TokenTree { fn _dummy() where Span: Send + Sync, - token::Token: Send + Sync, + token::TokenKind: Send + Sync, DelimSpan: Send + Sync, DelimToken: Send + Sync, TokenStream: Send + Sync, @@ -130,7 +130,7 @@ impl TokenTree { } /// Indicates if the stream is a token that is equal to the provided token. 
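// At this point in the series a token tree still stores its span beside
// the kind, and the `eq_token` just below compares kinds only. A minimal,
// self-contained model of that behavior; `Span` and `TokenKind` here are
// stand-ins, only the `eq_token` body mirrors the patch:
struct Span;
#[derive(PartialEq)]
enum TokenKind { Comma, Semi }
enum TokenTree {
    Token(Span, TokenKind),
    Delimited,
}
impl TokenTree {
    fn eq_token(&self, t: TokenKind) -> bool {
        match *self {
            TokenTree::Token(_, ref tk) => *tk == t, // the span never participates
            _ => false,
        }
    }
}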
-    pub fn eq_token(&self, t: Token) -> bool {
+    pub fn eq_token(&self, t: TokenKind) -> bool {
         match *self {
             TokenTree::Token(_, ref tk) => *tk == t,
             _ => false,
@@ -241,8 +241,8 @@ impl From<TokenTree> for TreeAndJoint {
     }
 }
 
-impl From<Token> for TokenStream {
-    fn from(token: Token) -> TokenStream {
+impl From<TokenKind> for TokenStream {
+    fn from(token: TokenKind) -> TokenStream {
         TokenTree::Token(DUMMY_SP, token).into()
     }
 }
@@ -580,7 +580,7 @@ mod tests {
     use super::*;
     use crate::syntax::ast::Ident;
     use crate::with_default_globals;
-    use crate::parse::token::Token;
+    use crate::parse::token::TokenKind;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
 
diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs
index 8dc9ce39915ad..9e26f1bf7d374 100644
--- a/src/libsyntax/util/parser.rs
+++ b/src/libsyntax/util/parser.rs
@@ -1,4 +1,4 @@
-use crate::parse::token::{self, Token, BinOpToken};
+use crate::parse::token::{self, TokenKind, BinOpToken};
 use crate::symbol::kw;
 use crate::ast::{self, BinOpKind};
 
@@ -69,7 +69,7 @@ pub enum Fixity {
 
 impl AssocOp {
     /// Creates a new AssocOp from a token
-    pub fn from_token(t: &Token) -> Option<AssocOp> {
+    pub fn from_token(t: &TokenKind) -> Option<AssocOp> {
         use AssocOp::*;
         match *t {
             token::BinOpEq(k) => Some(AssignOp(k)),
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 334709b152197..35f70092be432 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -14,7 +14,7 @@
 //! those that are created by the expansion of a macro.
 
 use crate::ast::*;
-use crate::parse::token::Token;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenTree, TokenStream};
 use syntax_pos::Span;
 
@@ -151,7 +151,7 @@ pub trait Visitor<'ast>: Sized {
     fn visit_tts(&mut self, tts: TokenStream) {
         walk_tts(self, tts)
     }
-    fn visit_token(&mut self, _t: Token) {}
+    fn visit_token(&mut self, _t: TokenKind) {}
     // FIXME: add `visit_interpolated` and `walk_interpolated`
     fn visit_vis(&mut self, vis: &'ast Visibility) {
         walk_vis(self, vis)
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index 13342c8e28e2f..29dd445e75168 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -4,7 +4,7 @@ use syntax::ast::{self, *};
 use syntax::source_map::Spanned;
 use syntax::ext::base::*;
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token::{self, Token};
+use syntax::parse::token::{self, TokenKind};
 use syntax::parse::parser::Parser;
 use syntax::print::pprust;
 use syntax::ptr::P;
@@ -31,7 +31,7 @@ pub fn expand_assert<'cx>(
         tts: custom_message.unwrap_or_else(|| {
             TokenStream::from(TokenTree::Token(
                 DUMMY_SP,
-                Token::lit(token::Str, Symbol::intern(&format!(
+                TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index cc05ecf8df5a6..119b83b7527b4 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -161,7 +161,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                 let stream = vec![
                     Ident(ast::Ident::new(sym::doc, span), false),
                     Eq,
-                    Token::lit(token::Str, Symbol::intern(&escaped), None),
+                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
                 .map(|token| tokenstream::TokenTree::Token(span, token))
@@ -220,7 +220,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal>
             }) if symbol.as_str().starts_with("-") => {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol
= Symbol::intern(&symbol.as_str()[1..]); - let integer = Token::lit(token::Integer, symbol, suffix); + let integer = TokenKind::lit(token::Integer, symbol, suffix); let a = tokenstream::TokenTree::Token(span, minus); let b = tokenstream::TokenTree::Token(span, integer); return vec![a, b].into_iter().collect(); @@ -231,7 +231,7 @@ impl ToInternal for TokenTree { }) if symbol.as_str().starts_with("-") => { let minus = BinOp(BinOpToken::Minus); let symbol = Symbol::intern(&symbol.as_str()[1..]); - let float = Token::lit(token::Float, symbol, suffix); + let float = TokenKind::lit(token::Float, symbol, suffix); let a = tokenstream::TokenTree::Token(span, minus); let b = tokenstream::TokenTree::Token(span, float); return vec![a, b].into_iter().collect(); From a3425edb46dfcc7031068b8bdda868e5a3b16ae1 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 4 Jun 2019 18:48:40 +0300 Subject: [PATCH 03/14] syntax: Rename `TokenAndSpan` into `Token` --- src/librustc_save_analysis/span_utils.rs | 18 +-- src/librustdoc/html/highlight.rs | 30 ++-- .../passes/check_code_block_syntax.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 139 ++++++++---------- src/libsyntax/parse/lexer/tokentrees.rs | 4 +- src/libsyntax/parse/parser.rs | 30 ++-- src/libsyntax/parse/token.rs | 12 ++ src/libsyntax/tokenstream.rs | 1 - 8 files changed, 118 insertions(+), 122 deletions(-) diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 5527fcb923b6f..5831b0bcd8fa3 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -60,11 +60,11 @@ impl<'a> SpanUtils<'a> { let mut toks = self.retokenise_span(span); loop { let next = toks.real_token(); - if next.tok == token::Eof { + if next == token::Eof { return None; } - if next.tok == tok { - return Some(next.sp); + if next == tok { + return Some(next.span); } } } @@ -74,12 +74,12 @@ impl<'a> SpanUtils<'a> { // let mut toks = self.retokenise_span(span); // loop { // let ts = toks.real_token(); - // if ts.tok == token::Eof { + // if ts == token::Eof { // return None; // } - // if ts.tok == token::Not { + // if ts == token::Not { // let ts = toks.real_token(); - // if ts.tok.is_ident() { + // if ts.kind.is_ident() { // return Some(ts.sp); // } else { // return None; @@ -93,12 +93,12 @@ impl<'a> SpanUtils<'a> { // let mut toks = self.retokenise_span(span); // let mut prev = toks.real_token(); // loop { - // if prev.tok == token::Eof { + // if prev == token::Eof { // return None; // } // let ts = toks.real_token(); - // if ts.tok == token::Not { - // if prev.tok.is_ident() { + // if ts == token::Not { + // if prev.kind.is_ident() { // return Some(prev.sp); // } else { // return None; diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 932419c78f22c..3b9de761828b7 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -12,8 +12,8 @@ use std::io; use std::io::prelude::*; use syntax::source_map::{SourceMap, FilePathMapping}; -use syntax::parse::lexer::{self, TokenAndSpan}; -use syntax::parse::token; +use syntax::parse::lexer; +use syntax::parse::token::{self, Token}; use syntax::parse; use syntax::symbol::{kw, sym}; use syntax_pos::{Span, FileName}; @@ -186,9 +186,9 @@ impl<'a> Classifier<'a> { } /// Gets the next token out of the lexer. 
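// The commit's net effect in one picture: the lexer's separate
// `TokenAndSpan { tok, sp }` pair type disappears and `Token` itself
// carries the span. Stand-in `Span`/`TokenKind` types; the `Token`
// shape mirrors the struct this patch adds to token.rs:
struct Span;
enum TokenKind { Eof, Comma }
// before: struct TokenAndSpan { tok: TokenKind, sp: Span }
struct Token { kind: TokenKind, span: Span }
fn eof_at(span: Span) -> Token {
    Token { kind: TokenKind::Eof, span } // one value carries both facts
}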
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, HighlightError> {
+    fn try_next_token(&mut self) -> Result<Token, HighlightError> {
         match self.lexer.try_next_token() {
-            Ok(tas) => Ok(tas),
+            Ok(token) => Ok(token),
             Err(_) => Err(HighlightError::LexError),
         }
     }
@@ -205,7 +205,7 @@ impl<'a> Classifier<'a> {
                                        -> Result<(), HighlightError> {
         loop {
             let next = self.try_next_token()?;
-            if next.tok == token::Eof {
+            if next == token::Eof {
                 break;
             }
 
@@ -218,9 +218,9 @@ impl<'a> Classifier<'a> {
     // Handles an individual token from the lexer.
     fn write_token<W: Writer>(&mut self,
                               out: &mut W,
-                              tas: TokenAndSpan)
+                              token: Token)
                               -> Result<(), HighlightError> {
-        let klass = match tas.tok {
+        let klass = match token.kind {
             token::Shebang(s) => {
                 out.string(Escape(&s.as_str()), Class::None)?;
                 return Ok(());
@@ -234,7 +234,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().tok != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -257,7 +257,7 @@ impl<'a> Classifier<'a> {
             token::Question => Class::QuestionMark,
 
             token::Dollar => {
-                if self.lexer.peek().tok.is_ident() {
+                if self.lexer.peek().kind.is_ident() {
                     self.in_macro_nonterminal = true;
                     Class::MacroNonTerminal
                 } else {
@@ -280,9 +280,9 @@ impl<'a> Classifier<'a> {
                 // as an attribute.
 
                 // Case 1: #![inner_attribute]
-                if self.lexer.peek().tok == token::Not {
+                if self.lexer.peek() == token::Not {
                     self.try_next_token()?; // NOTE: consumes `!` token!
-                    if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                    if self.lexer.peek() == token::OpenDelim(token::Bracket) {
                         self.in_attribute = true;
                         out.enter_span(Class::Attribute)?;
                     }
@@ -292,7 +292,7 @@ impl<'a> Classifier<'a> {
                 }
 
                 // Case 2: #[outer_attribute]
-                if self.lexer.peek().tok == token::OpenDelim(token::Bracket) {
+                if self.lexer.peek() == token::OpenDelim(token::Bracket) {
                     self.in_attribute = true;
                     out.enter_span(Class::Attribute)?;
                 }
@@ -335,13 +335,13 @@ impl<'a> Classifier<'a> {
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
 
-                    _ if tas.tok.is_reserved_ident() => Class::KeyWord,
+                    _ if token.kind.is_reserved_ident() => Class::KeyWord,
 
                     _ => {
                         if self.in_macro_nonterminal {
                             self.in_macro_nonterminal = false;
                             Class::MacroNonTerminal
-                        } else if self.lexer.peek().tok == token::Not {
+                        } else if self.lexer.peek() == token::Not {
                             self.in_macro = true;
                             Class::Macro
                         } else {
@@ -359,7 +359,7 @@ impl<'a> Classifier<'a> {
 
         // Anything that didn't return above is the simple case where the
         // class just spans a single token, so we can use the `string` method.
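// Shape of the classification loop above, reduced to its control flow:
// pull tokens until Eof and hand each one to the writer. The closures
// stand in for `try_next_token`/`write_token`; everything here is
// illustrative, not the rustdoc API:
#[derive(PartialEq)]
enum TokenKind { Eof, Text }
fn highlight<E>(
    mut next: impl FnMut() -> Result<TokenKind, E>,
    mut write: impl FnMut(TokenKind) -> Result<(), E>,
) -> Result<(), E> {
    loop {
        let token = next()?;
        if token == TokenKind::Eof { break; } // stop at end of input
        write(token)?;
    }
    Ok(())
}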
- out.string(Escape(&self.snip(tas.sp)), klass)?; + out.string(Escape(&self.snip(token.span)), klass)?; Ok(()) } diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs index 0556852c54ac2..694843ad7f71e 100644 --- a/src/librustdoc/passes/check_code_block_syntax.rs +++ b/src/librustdoc/passes/check_code_block_syntax.rs @@ -1,5 +1,5 @@ use errors::Applicability; -use syntax::parse::lexer::{TokenAndSpan, StringReader as Lexer}; +use syntax::parse::lexer::{StringReader as Lexer}; use syntax::parse::{ParseSess, token}; use syntax::source_map::FilePathMapping; use syntax_pos::FileName; @@ -33,8 +33,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> { ); let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| { - while let Ok(TokenAndSpan { tok, .. }) = lexer.try_next_token() { - if tok == token::Eof { + while let Ok(token::Token { kind, .. }) = lexer.try_next_token() { + if kind == token::Eof { break; } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index ca9199975bb7e..32d5b16dd714f 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1,6 +1,6 @@ use crate::ast::{self, Ident}; use crate::parse::ParseSess; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, Token, TokenKind}; use crate::symbol::{sym, Symbol}; use crate::parse::unescape; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; @@ -20,21 +20,6 @@ pub mod comments; mod tokentrees; mod unicode_chars; -#[derive(Clone, Debug)] -pub struct TokenAndSpan { - pub tok: TokenKind, - pub sp: Span, -} - -impl Default for TokenAndSpan { - fn default() -> Self { - TokenAndSpan { - tok: token::Whitespace, - sp: syntax_pos::DUMMY_SP, - } - } -} - #[derive(Clone, Debug)] pub struct UnmatchedBrace { pub expected_delim: token::DelimToken, @@ -87,7 +72,7 @@ impl<'a> StringReader<'a> { ident } - fn unwrap_or_abort(&mut self, res: Result) -> TokenAndSpan { + fn unwrap_or_abort(&mut self, res: Result) -> Token { match res { Ok(tok) => tok, Err(_) => { @@ -97,17 +82,17 @@ impl<'a> StringReader<'a> { } } - fn next_token(&mut self) -> TokenAndSpan where Self: Sized { + fn next_token(&mut self) -> Token where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } /// Returns the next token. EFFECT: advances the string_reader. - pub fn try_next_token(&mut self) -> Result { + pub fn try_next_token(&mut self) -> Result { assert!(self.fatal_errs.is_empty()); - let ret_val = TokenAndSpan { - tok: replace(&mut self.peek_tok, token::Whitespace), - sp: self.peek_span, + let ret_val = Token { + kind: replace(&mut self.peek_tok, token::Whitespace), + span: self.peek_span, }; self.advance_token()?; Ok(ret_val) @@ -135,10 +120,10 @@ impl<'a> StringReader<'a> { return None; } - fn try_real_token(&mut self) -> Result { + fn try_real_token(&mut self) -> Result { let mut t = self.try_next_token()?; loop { - match t.tok { + match t.kind { token::Whitespace | token::Comment | token::Shebang(_) => { t = self.try_next_token()?; } @@ -149,7 +134,7 @@ impl<'a> StringReader<'a> { Ok(t) } - pub fn real_token(&mut self) -> TokenAndSpan { + pub fn real_token(&mut self) -> Token { let res = self.try_real_token(); self.unwrap_or_abort(res) } @@ -194,11 +179,11 @@ impl<'a> StringReader<'a> { buffer } - pub fn peek(&self) -> TokenAndSpan { + pub fn peek(&self) -> Token { // FIXME(pcwalton): Bad copy! 
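// The lexer above keeps a one-token lookahead buffer. `try_next_token`
// swaps the buffered kind out with `mem::replace`, leaving a cheap
// `Whitespace` placeholder while the next token is scanned. Reduced
// sketch with stand-in types; only the `mem::replace` trick mirrors the
// patch:
use std::mem;
enum TokenKind { Whitespace, Ident }
struct Reader { peek_tok: TokenKind }
impl Reader {
    fn next_token(&mut self) -> TokenKind {
        let kind = mem::replace(&mut self.peek_tok, TokenKind::Whitespace);
        self.advance(); // refill `peek_tok` from the source text
        kind
    }
    fn advance(&mut self) { self.peek_tok = TokenKind::Ident; }
}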
- TokenAndSpan { - tok: self.peek_tok.clone(), - sp: self.peek_span, + Token { + kind: self.peek_tok.clone(), + span: self.peek_span, } } @@ -341,9 +326,9 @@ impl<'a> StringReader<'a> { fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { - self.peek_span_src_raw = comment.sp; - self.peek_span = comment.sp; - self.peek_tok = comment.tok; + self.peek_span_src_raw = comment.span; + self.peek_span = comment.span; + self.peek_tok = comment.kind; } None => { if self.is_eof() { @@ -527,7 +512,7 @@ impl<'a> StringReader<'a> { /// PRECONDITION: self.ch is not whitespace /// Eats any kind of comment. - fn scan_comment(&mut self) -> Option { + fn scan_comment(&mut self) -> Option { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; @@ -563,14 +548,14 @@ impl<'a> StringReader<'a> { self.bump(); } - let tok = if doc_comment { + let kind = if doc_comment { self.with_str_from(start_bpos, |string| { token::DocComment(Symbol::intern(string)) }) } else { token::Comment }; - Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos) }) + Some(Token { kind, span: self.mk_sp(start_bpos, self.pos) }) } Some('*') => { self.bump(); @@ -594,9 +579,9 @@ impl<'a> StringReader<'a> { while !self.ch_is('\n') && !self.is_eof() { self.bump(); } - return Some(TokenAndSpan { - tok: token::Shebang(self.name_from(start)), - sp: self.mk_sp(start, self.pos), + return Some(Token { + kind: token::Shebang(self.name_from(start)), + span: self.mk_sp(start, self.pos), }); } } @@ -608,7 +593,7 @@ impl<'a> StringReader<'a> { /// If there is whitespace, shebang, or a comment, scan it. Otherwise, /// return `None`. - fn scan_whitespace_or_comment(&mut self) -> Option { + fn scan_whitespace_or_comment(&mut self) -> Option { match self.ch.unwrap_or('\0') { // # to handle shebang at start of file -- this is the entry point // for skipping over all "junk" @@ -622,9 +607,9 @@ impl<'a> StringReader<'a> { while is_pattern_whitespace(self.ch) { self.bump(); } - let c = Some(TokenAndSpan { - tok: token::Whitespace, - sp: self.mk_sp(start_bpos, self.pos), + let c = Some(Token { + kind: token::Whitespace, + span: self.mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -634,7 +619,7 @@ impl<'a> StringReader<'a> { } /// Might return a sugared-doc-attr - fn scan_block_comment(&mut self) -> Option { + fn scan_block_comment(&mut self) -> Option { // block comments starting with "/**" or "/*!" 
are doc-comments let is_doc_comment = self.ch_is('*') || self.ch_is('!'); let start_bpos = self.pos - BytePos(2); @@ -671,7 +656,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start_bpos, |string| { // but comments with only "*"s between two "/"s are not - let tok = if is_block_doc_comment(string) { + let kind = if is_block_doc_comment(string) { let string = if has_cr { self.translate_crlf(start_bpos, string, @@ -684,9 +669,9 @@ impl<'a> StringReader<'a> { token::Comment }; - Some(TokenAndSpan { - tok, - sp: self.mk_sp(start_bpos, self.pos), + Some(Token { + kind, + span: self.mk_sp(start_bpos, self.pos), }) }) } @@ -1611,26 +1596,26 @@ mod tests { "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); let id = Ident::from_str("fn"); - assert_eq!(string_reader.next_token().tok, token::Comment); - assert_eq!(string_reader.next_token().tok, token::Whitespace); + assert_eq!(string_reader.next_token().kind, token::Comment); + assert_eq!(string_reader.next_token().kind, token::Whitespace); let tok1 = string_reader.next_token(); - let tok2 = TokenAndSpan { - tok: token::Ident(id, false), - sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + let tok2 = Token { + kind: token::Ident(id, false), + span: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), }; - assert_eq!(tok1.tok, tok2.tok); - assert_eq!(tok1.sp, tok2.sp); - assert_eq!(string_reader.next_token().tok, token::Whitespace); + assert_eq!(tok1.kind, tok2.kind); + assert_eq!(tok1.span, tok2.span); + assert_eq!(string_reader.next_token().kind, token::Whitespace); // the 'main' id is already read: assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); - let tok4 = TokenAndSpan { - tok: mk_ident("main"), - sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + let tok4 = Token { + kind: mk_ident("main"), + span: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), }; - assert_eq!(tok3.tok, tok4.tok); - assert_eq!(tok3.sp, tok4.sp); + assert_eq!(tok3.kind, tok4.kind); + assert_eq!(tok3.span, tok4.span); // the lparen is already read: assert_eq!(string_reader.pos.clone(), BytePos(29)) }) @@ -1640,7 +1625,7 @@ mod tests { // of tokens (stop checking after exhausting the expected vec) fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec) { for expected_tok in &expected { - assert_eq!(&string_reader.next_token().tok, expected_tok); + assert_eq!(&string_reader.next_token().kind, expected_tok); } } @@ -1698,7 +1683,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind, mk_lit(token::Char, "a", None)); }) } @@ -1708,7 +1693,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind, mk_lit(token::Char, " ", None)); }) } @@ -1718,7 +1703,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind, mk_lit(token::Char, "\\n", None)); }) } @@ -1728,7 +1713,7 @@ mod tests { with_default_globals(|| { let sm = 
Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind, token::Lifetime(Ident::from_str("'abc"))); }) } @@ -1738,7 +1723,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind, mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None)); }) } @@ -1750,10 +1735,10 @@ mod tests { let sh = mk_sess(sm.clone()); macro_rules! test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok, + assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind, mk_lit(token::$tok_type, $tok_contents, Some("suffix"))); // with a whitespace separator: - assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok, + assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind, mk_lit(token::$tok_type, $tok_contents, None)); }} } @@ -1768,11 +1753,11 @@ mod tests { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind, mk_lit(token::Integer, "2", Some("us"))); - assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind, mk_lit(token::StrRaw(3), "raw", Some("suffix"))); - assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, + assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind, mk_lit(token::ByteStrRaw(3), "raw", Some("suffix"))); }) } @@ -1790,11 +1775,11 @@ mod tests { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().tok { + match lexer.next_token().kind { token::Comment => {} _ => panic!("expected a comment!"), } - assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None)); + assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None)); }) } @@ -1805,10 +1790,10 @@ mod tests { let sh = mk_sess(sm.clone()); let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); - assert_eq!(comment.tok, token::Comment); - assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().tok, token::Whitespace); - assert_eq!(lexer.next_token().tok, + assert_eq!(comment.kind, token::Comment); + assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7))); + assert_eq!(lexer.next_token().kind, token::Whitespace); + assert_eq!(lexer.next_token().kind, token::DocComment(Symbol::intern("/// test"))); }) } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index b8cd32883b88c..767d37016da87 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -220,7 +220,7 @@ impl<'a> TokenTreesReader<'a> { fn real_token(&mut self) { let t = self.string_reader.real_token(); - self.token = t.tok; - self.span = t.sp; + self.token = t.kind; + self.span = t.span; } } diff --git 
a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8fc02dd9259e0..3b7d4e14dbb40 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -36,9 +36,9 @@ use crate::{ast, attr}; use crate::ext::base::DummyResult; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::parse::{SeqSep, classify, literal, token}; -use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; +use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::parse::token::DelimToken; +use crate::parse::token::{Token, DelimToken}; use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use crate::util::parser::{AssocOp, Fixity}; use crate::print::pprust; @@ -295,7 +295,7 @@ impl TokenCursorFrame { } impl TokenCursor { - fn next(&mut self) -> TokenAndSpan { + fn next(&mut self) -> Token { loop { let tree = if !self.frame.open_delim { self.frame.open_delim = true; @@ -309,7 +309,7 @@ impl TokenCursor { self.frame = frame; continue } else { - return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP } + return Token { kind: token::Eof, span: DUMMY_SP } }; match self.frame.last_token { @@ -318,7 +318,7 @@ impl TokenCursor { } match tree { - TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp }, + TokenTree::Token(span, kind) => return Token { kind, span }, TokenTree::Delimited(sp, delim, tts) => { let frame = TokenCursorFrame::new(sp, delim, &tts); self.stack.push(mem::replace(&mut self.frame, frame)); @@ -327,9 +327,9 @@ impl TokenCursor { } } - fn next_desugared(&mut self) -> TokenAndSpan { + fn next_desugared(&mut self) -> Token { let (sp, name) = match self.next() { - TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), + Token { span, kind: token::DocComment(name) } => (span, name), tok => return tok, }; @@ -499,8 +499,8 @@ impl<'a> Parser<'a> { }; let tok = parser.next_tok(); - parser.token = tok.tok; - parser.span = tok.sp; + parser.token = tok.kind; + parser.span = tok.span; if let Some(directory) = directory { parser.directory = directory; @@ -515,15 +515,15 @@ impl<'a> Parser<'a> { parser } - fn next_tok(&mut self) -> TokenAndSpan { + fn next_tok(&mut self) -> Token { let mut next = if self.desugar_doc_comments { self.token_cursor.next_desugared() } else { self.token_cursor.next() }; - if next.sp.is_dummy() { + if next.span.is_dummy() { // Tweak the location for better diagnostics, but keep syntactic context intact. - next.sp = self.prev_span.with_ctxt(next.sp.ctxt()); + next.span = self.prev_span.with_ctxt(next.span.ctxt()); } next } @@ -1023,8 +1023,8 @@ impl<'a> Parser<'a> { }; let next = self.next_tok(); - self.span = next.sp; - self.token = next.tok; + self.token = next.kind; + self.span = next.span; self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); @@ -1038,8 +1038,8 @@ impl<'a> Parser<'a> { // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. 
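// How the parser consumes the cursor's output after this rename: the
// token cursor yields a whole `Token`, and `bump` still files `kind`
// and `span` into separate parser fields. Stand-in types around the two
// assignments that mirror the hunk here:
struct Span;
enum TokenKind { Eof, Comma }
struct Token { kind: TokenKind, span: Span }
struct Parser { token: TokenKind, span: Span }
impl Parser {
    fn bump_from(&mut self, next: Token) {
        self.token = next.kind; // kind and span travel together...
        self.span = next.span;  // ...until the parser splits them again
    }
}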
self.prev_token_kind = PrevTokenKind::Other; - self.span = span; self.token = next; + self.span = span; self.expected_tokens.clear(); } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index aa1e8fd060f78..3679e4050ff42 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -235,6 +235,12 @@ pub enum TokenKind { #[cfg(target_arch = "x86_64")] static_assert_size!(TokenKind, 16); +#[derive(Clone, Debug)] +pub struct Token { + pub kind: TokenKind, + pub span: Span, +} + impl TokenKind { /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { @@ -602,6 +608,12 @@ impl TokenKind { } } +impl PartialEq for Token { + fn eq(&self, rhs: &TokenKind) -> bool { + self.kind == *rhs + } +} + #[derive(Clone, RustcEncodable, RustcDecodable)] /// For interpolation during macro expansion. pub enum Nonterminal { diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 0f50f51f5d35c..654c21fd094e9 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -580,7 +580,6 @@ mod tests { use super::*; use crate::syntax::ast::Ident; use crate::with_default_globals; - use crate::parse::token::TokenKind; use crate::util::parser_testing::string_to_stream; use syntax_pos::{Span, BytePos, NO_EXPANSION}; From e0127dbf8135b766a332ce21c4eee48998b59bef Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 4 Jun 2019 20:42:43 +0300 Subject: [PATCH 04/14] syntax: Use `Token` in `TokenTree::Token` --- src/librustc/hir/lowering.rs | 12 +-- src/librustc/ich/impls_syntax.rs | 124 ++++++++++++------------ src/librustc_lint/builtin.rs | 4 +- src/librustdoc/html/highlight.rs | 4 +- src/libsyntax/attr/mod.rs | 37 ++++--- src/libsyntax/diagnostics/plugin.rs | 16 +-- src/libsyntax/ext/base.rs | 10 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 7 +- src/libsyntax/ext/tt/macro_rules.rs | 32 +++--- src/libsyntax/ext/tt/quoted.rs | 80 +++++++-------- src/libsyntax/ext/tt/transcribe.rs | 12 +-- src/libsyntax/feature_gate.rs | 8 +- src/libsyntax/lib.rs | 1 + src/libsyntax/mut_visit.rs | 6 +- src/libsyntax/parse/attr.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 40 ++++---- src/libsyntax/parse/lexer/tokentrees.rs | 2 +- src/libsyntax/parse/literal.rs | 2 +- src/libsyntax/parse/mod.rs | 31 +++--- src/libsyntax/parse/parser.rs | 20 ++-- src/libsyntax/parse/token.rs | 21 ++-- src/libsyntax/print/pprust.rs | 8 +- src/libsyntax/tokenstream.rs | 111 +++++++++++---------- src/libsyntax/visit.rs | 2 +- src/libsyntax_ext/asm.rs | 7 +- src/libsyntax_ext/assert.rs | 2 +- src/libsyntax_ext/concat_idents.rs | 6 +- src/libsyntax_ext/deriving/custom.rs | 2 +- src/libsyntax_ext/proc_macro_server.rs | 24 ++--- src/libsyntax_ext/trace_macros.rs | 4 +- 31 files changed, 327 insertions(+), 312 deletions(-) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 919f682fc4f6f..e7f52b48cb9ed 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary; use syntax::std_inject; use syntax::symbol::{kw, sym, Symbol}; use syntax::tokenstream::{TokenStream, TokenTree}; -use syntax::parse::token::{self, TokenKind}; +use syntax::parse::token::{self, Token}; use syntax::visit::{self, Visitor}; use syntax_pos::{DUMMY_SP, edition, Span}; @@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> { fn lower_token_tree(&mut self, tree: TokenTree) -> 
TokenStream { match tree { - TokenTree::Token(span, token) => self.lower_token(token, span), + TokenTree::Token(token) => self.lower_token(token), TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( span, delim, @@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> { } } - fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream { - match token { + fn lower_token(&mut self, token: Token) -> TokenStream { + match token.kind { token::Interpolated(nt) => { - let tts = nt.to_tokenstream(&self.sess.parse_sess, span); + let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span); self.lower_token_stream(tts) } - other => TokenTree::Token(span, other).into(), + _ => TokenTree::Token(token).into(), } } diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 8e2550d3c4537..a373f434bf71e 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -261,9 +261,8 @@ for tokenstream::TokenTree { hasher: &mut StableHasher) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { - tokenstream::TokenTree::Token(span, ref token) => { - span.hash_stable(hcx, hasher); - hash_token(token, hcx, hasher); + tokenstream::TokenTree::Token(ref token) => { + token.hash_stable(hcx, hasher); } tokenstream::TokenTree::Delimited(span, delim, ref tts) => { span.hash_stable(hcx, hasher); @@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit { suffix }); -fn hash_token<'a, 'gcx, W: StableHasherResult>( - token: &token::TokenKind, - hcx: &mut StableHashingContext<'a>, - hasher: &mut StableHasher, -) { - mem::discriminant(token).hash_stable(hcx, hasher); - match *token { - token::Eq | - token::Lt | - token::Le | - token::EqEq | - token::Ne | - token::Ge | - token::Gt | - token::AndAnd | - token::OrOr | - token::Not | - token::Tilde | - token::At | - token::Dot | - token::DotDot | - token::DotDotDot | - token::DotDotEq | - token::Comma | - token::Semi | - token::Colon | - token::ModSep | - token::RArrow | - token::LArrow | - token::FatArrow | - token::Pound | - token::Dollar | - token::Question | - token::SingleQuote | - token::Whitespace | - token::Comment | - token::Eof => {} - - token::BinOp(bin_op_token) | - token::BinOpEq(bin_op_token) => { - std_hash::Hash::hash(&bin_op_token, hasher); - } +impl<'a> HashStable> for token::TokenKind { + fn hash_stable(&self, + hcx: &mut StableHashingContext<'a>, + hasher: &mut StableHasher) { + mem::discriminant(self).hash_stable(hcx, hasher); + match *self { + token::Eq | + token::Lt | + token::Le | + token::EqEq | + token::Ne | + token::Ge | + token::Gt | + token::AndAnd | + token::OrOr | + token::Not | + token::Tilde | + token::At | + token::Dot | + token::DotDot | + token::DotDotDot | + token::DotDotEq | + token::Comma | + token::Semi | + token::Colon | + token::ModSep | + token::RArrow | + token::LArrow | + token::FatArrow | + token::Pound | + token::Dollar | + token::Question | + token::SingleQuote | + token::Whitespace | + token::Comment | + token::Eof => {} + + token::BinOp(bin_op_token) | + token::BinOpEq(bin_op_token) => { + std_hash::Hash::hash(&bin_op_token, hasher); + } - token::OpenDelim(delim_token) | - token::CloseDelim(delim_token) => { - std_hash::Hash::hash(&delim_token, hasher); - } - token::Literal(lit) => lit.hash_stable(hcx, hasher), + token::OpenDelim(delim_token) | + token::CloseDelim(delim_token) => { + std_hash::Hash::hash(&delim_token, hasher); + } + token::Literal(lit) => lit.hash_stable(hcx, hasher), - token::Ident(ident, is_raw) => { - 
ident.name.hash_stable(hcx, hasher); - is_raw.hash_stable(hcx, hasher); - } - token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), + token::Ident(ident, is_raw) => { + ident.name.hash_stable(hcx, hasher); + is_raw.hash_stable(hcx, hasher); + } + token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), - token::Interpolated(_) => { - bug!("interpolated tokens should not be present in the HIR") - } + token::Interpolated(_) => { + bug!("interpolated tokens should not be present in the HIR") + } - token::DocComment(val) | - token::Shebang(val) => val.hash_stable(hcx, hasher), + token::DocComment(val) | + token::Shebang(val) => val.hash_stable(hcx, hasher), + } } } +impl_stable_hash_for!(struct token::Token { + kind, + span +}); + impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem { MetaItem(meta_item), Literal(lit) diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 937085c8ad8e8..a3da97bd5db1e 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -1414,11 +1414,11 @@ impl KeywordIdents { fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) { for tt in tokens.into_trees() { match tt { - TokenTree::Token(span, tok) => match tok.ident() { + TokenTree::Token(token) => match token.ident() { // only report non-raw idents Some((ident, false)) => { self.check_ident_token(cx, UnderMacro(true), ast::Ident { - span: span.substitute_dummy(ident.span), + span: token.span.substitute_dummy(ident.span), ..ident }); } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 3b9de761828b7..d68741233754b 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -234,7 +234,7 @@ impl<'a> Classifier<'a> { // reference or dereference operator or a reference or pointer type, instead of the // bit-and or multiplication operator. token::BinOp(token::And) | token::BinOp(token::Star) - if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord, + if self.lexer.peek() != token::Whitespace => Class::RefKeyWord, // Consider this as part of a macro invocation if there was a // leading identifier. 
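// The stable-hashing impls above all follow one pattern: hash the enum
// discriminant first, then whatever payload the variant carries. The
// same idea expressed with std's `Hash` as an analogy only (rustc's
// `HashStable` takes a hashing context and has a different signature):
use std::hash::{Hash, Hasher};
use std::mem;
enum TokenKind { Comma, Shebang(u32) }
impl Hash for TokenKind {
    fn hash<H: Hasher>(&self, state: &mut H) {
        mem::discriminant(self).hash(state); // which variant it is
        match self {
            TokenKind::Shebang(val) => val.hash(state), // then its payload
            TokenKind::Comma => {}
        }
    }
}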
@@ -335,7 +335,7 @@ impl<'a> Classifier<'a> { sym::Option | sym::Result => Class::PreludeTy, sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal, - _ if token.kind.is_reserved_ident() => Class::KeyWord, + _ if token.is_reserved_ident() => Class::KeyWord, _ => { if self.in_macro_nonterminal { diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index ade15f024a609..448061395afdc 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -20,7 +20,7 @@ use crate::source_map::{BytePos, Spanned, dummy_spanned}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::parser::Parser; use crate::parse::{self, ParseSess, PResult}; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, Token, TokenKind}; use crate::ptr::P; use crate::symbol::{sym, Symbol}; use crate::ThinVec; @@ -465,9 +465,9 @@ impl MetaItem { let mod_sep_span = Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt()); - idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into()); + idents.push(TokenTree::token(mod_sep_span, token::ModSep).into()); } - idents.push(TokenTree::Token(segment.ident.span, + idents.push(TokenTree::token(segment.ident.span, TokenKind::from_ast_ident(segment.ident)).into()); last_pos = segment.ident.span.hi(); } @@ -480,10 +480,10 @@ impl MetaItem { { // FIXME: Share code with `parse_path`. let path = match tokens.next() { - Some(TokenTree::Token(span, token @ token::Ident(..))) | - Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: { - let mut segments = if let token::Ident(ident, _) = token { - if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() { + Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) | + Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: { + let mut segments = if let token::Ident(ident, _) = kind { + if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() { tokens.next(); vec![PathSegment::from_ident(ident.with_span_pos(span))] } else { @@ -493,13 +493,12 @@ impl MetaItem { vec![PathSegment::path_root(span)] }; loop { - if let Some(TokenTree::Token(span, - token::Ident(ident, _))) = tokens.next() { + if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() { segments.push(PathSegment::from_ident(ident.with_span_pos(span))); } else { return None; } - if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() { + if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() { tokens.next(); } else { break; @@ -508,7 +507,7 @@ impl MetaItem { let span = span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments } } - Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt { + Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. 
})) => match *nt { token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), token::Nonterminal::NtPath(ref path) => path.clone(), @@ -533,7 +532,7 @@ impl MetaItemKind { match *self { MetaItemKind::Word => TokenStream::empty(), MetaItemKind::NameValue(ref lit) => { - let mut vec = vec![TokenTree::Token(span, token::Eq).into()]; + let mut vec = vec![TokenTree::token(span, token::Eq).into()]; lit.tokens().append_to_tree_and_joint_vec(&mut vec); TokenStream::new(vec) } @@ -541,7 +540,7 @@ impl MetaItemKind { let mut tokens = Vec::new(); for (i, item) in list.iter().enumerate() { if i > 0 { - tokens.push(TokenTree::Token(span, token::Comma).into()); + tokens.push(TokenTree::token(span, token::Comma).into()); } item.tokens().append_to_tree_and_joint_vec(&mut tokens); } @@ -558,10 +557,10 @@ impl MetaItemKind { where I: Iterator, { let delimited = match tokens.peek().cloned() { - Some(TokenTree::Token(_, token::Eq)) => { + Some(TokenTree::Token(token)) if token == token::Eq => { tokens.next(); - return if let Some(TokenTree::Token(span, token)) = tokens.next() { - Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue) + return if let Some(TokenTree::Token(token)) = tokens.next() { + Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue) } else { None }; @@ -579,7 +578,7 @@ impl MetaItemKind { let item = NestedMetaItem::from_tokens(&mut tokens)?; result.push(item); match tokens.next() { - None | Some(TokenTree::Token(_, token::Comma)) => {} + None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {} _ => return None, } } @@ -605,8 +604,8 @@ impl NestedMetaItem { fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, { - if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() { - if let Ok(lit) = Lit::from_token(&token, span) { + if let Some(TokenTree::Token(token)) = tokens.peek().cloned() { + if let Ok(lit) = Lit::from_token(&token, token.span) { tokens.next(); return Some(NestedMetaItem::Literal(lit)); } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 0c57c23b2b5c4..b342e4bc47274 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -5,7 +5,7 @@ use crate::ast::{self, Ident, Name}; use crate::source_map; use crate::ext::base::{ExtCtxt, MacEager, MacResult}; use crate::ext::build::AstBuilder; -use crate::parse::token; +use crate::parse::token::{self, Token}; use crate::ptr::P; use crate::symbol::kw; use crate::tokenstream::{TokenTree}; @@ -34,7 +34,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree: &[TokenTree]) -> Box { let code = match (token_tree.len(), token_tree.get(0)) { - (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code, + (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code, _ => unreachable!() }; @@ -72,12 +72,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => { + (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => { (code, None) }, - (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))), - Some(&TokenTree::Token(_, token::Comma)), - Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => { + (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. 
})), + Some(&TokenTree::Token(Token { kind: token::Comma, .. })), + Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => { (code, Some(symbol)) } _ => unreachable!() @@ -143,9 +143,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &TokenTree::Token(_, token::Ident(ref crate_name, _)), + &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }), // DIAGNOSTICS ident. - &TokenTree::Token(_, token::Ident(ref name, _)) + &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. }) ) => (*&crate_name, name), _ => unreachable!() }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 4b5b9ff7bbeee..0c2ab67240741 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -265,10 +265,12 @@ impl TTMacroExpander for F impl MutVisitor for AvoidInterpolatedIdents { fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { - if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt { - if let token::NtIdent(ident, is_raw) = **nt { - *tt = tokenstream::TokenTree::Token(ident.span, - token::Ident(ident, is_raw)); + if let tokenstream::TokenTree::Token(token) = tt { + if let token::Interpolated(nt) = &token.kind { + if let token::NtIdent(ident, is_raw) = **nt { + *tt = tokenstream::TokenTree::token(ident.span, + token::Ident(ident, is_raw)); + } } } mut_visit::noop_visit_tt(tt, self) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 7b158b65d1562..4396b9be9bbb0 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -585,7 +585,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } AttrProcMacro(ref mac, ..) => { self.gate_proc_macro_attr_item(attr.span, &item); - let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item { + let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index c22952ed7504b..6acdffedd6b1a 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -78,7 +78,7 @@ use crate::ast::Ident; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; -use crate::parse::token::{self, DocComment, Nonterminal, TokenKind}; +use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{DelimSpan, TokenStream}; @@ -609,7 +609,8 @@ fn inner_parse_loop<'root, 'tt>( // // At the beginning of the loop, if we reach the end of the delimited submatcher, // we pop the stack to backtrack out of the descent. - seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | + seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => { let lower_elts = mem::replace(&mut item.top_elts, Tt(seq)); let idx = item.idx; item.stack.push(MatcherTtFrame { @@ -621,7 +622,7 @@ fn inner_parse_loop<'root, 'tt>( } // We just matched a normal token. We can just advance the parser. 
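// The mechanical rewrite this commit applies at every match site: a
// tuple pattern over (span, kind) becomes a struct pattern that names
// the one field it cares about and discards the rest with `..`.
// Self-contained illustration with stand-in types:
struct Span;
enum TokenKind { Comma, Semi }
struct Token { kind: TokenKind, span: Span }
enum TokenTree { Token(Token), Delimited }
fn is_comma(tt: &TokenTree) -> bool {
    // before: TokenTree::Token(_, TokenKind::Comma)
    match tt {
        TokenTree::Token(Token { kind: TokenKind::Comma, .. }) => true,
        _ => false,
    }
}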
-                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+                TokenTree::Token(t) if token_name_eq(&t, token) => {
                     item.idx += 1;
                     next_items.push(item);
                 }
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 9d3ea4d8645da..703ad0053a0ef 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -11,7 +11,7 @@ use crate::ext::tt::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
+use crate::parse::token::{self, Token, NtTT};
 use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
@@ -270,7 +270,7 @@ pub fn compile(
     quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
         tts: vec![
             quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-            quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+            quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
             quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
         ],
         separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
     })),
     // to phase into semicolon-termination instead of semicolon-separation
     quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-        tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+        tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
         separator: None,
         op: quoted::KleeneOp::ZeroOrMore,
         num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                         }
 
                         assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -1015,7 +1015,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;
 
-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1033,8 +1033,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "stmt" | "expr"  => {
                 let tokens = vec!["`=>`", "`,`", "`;`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Semi => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
@@ -1043,8 +1043,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "pat" => {
                 let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
                         Ident(i, false) if i.name == kw::If ||
                                            i.name == kw::In => IsInFollow::Yes,
@@ -1058,8 +1058,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                     "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                     "`where`",
                 ];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         OpenDelim(token::DelimToken::Brace) |
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
@@ -1089,8 +1089,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 let tokens = vec!["`,`", "an ident", "a type"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
                         Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
                             IsInFollow::Yes,
@@ -1150,7 +1150,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index fe0cb56b29e30..9f4e35ad3d779 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -2,7 +2,8 @@ use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -39,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(open_span, self.open_token())
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(close_span, self.close_token())
     }
 }
@@ -81,7 +82,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
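// Shape of the FOLLOW-set check in `is_in_follow` above: each fragment
// kind whitelists the token kinds that may legally come after it and
// rejects everything else with the list of expected tokens. Reduced
// stand-in for the `expr`/`stmt` arm:
enum TokenKind { FatArrow, Comma, Semi, Eq }
enum IsInFollow { Yes, No(Vec<&'static str>) }
fn is_in_follow_expr(kind: &TokenKind) -> IsInFollow {
    match kind {
        // `expr` and `stmt` fragments may only be followed by `=>`, `,`, `;`
        TokenKind::FatArrow | TokenKind::Comma | TokenKind::Semi => IsInFollow::Yes,
        _ => IsInFollow::No(vec!["`=>`", "`,`", "`;`"]),
    }
}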
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { - Token(Span, token::TokenKind), + Token(Token), Delimited(DelimSpan, Lrc), /// A kleene-style repetition sequence Sequence(DelimSpan, Lrc), @@ -144,13 +145,17 @@ impl TokenTree { /// Retrieves the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) - | TokenTree::MetaVar(sp, _) - | TokenTree::MetaVarDecl(sp, _, _) => sp, - TokenTree::Delimited(sp, _) - | TokenTree::Sequence(sp, _) => sp.entire(), + TokenTree::Token(Token { span, .. }) + | TokenTree::MetaVar(span, _) + | TokenTree::MetaVarDecl(span, _, _) => span, + TokenTree::Delimited(span, _) + | TokenTree::Sequence(span, _) => span.entire(), } } + + crate fn token(span: Span, kind: TokenKind) -> TokenTree { + TokenTree::Token(Token { kind, span }) + } } /// Takes a `tokenstream::TokenStream` and returns a `Vec`. Specifically, this @@ -205,14 +210,14 @@ pub fn parse( match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { - Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { - Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { + Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() { + Some(tokenstream::TokenTree::Token(token)) => match token.ident() { Some((kind, _)) => { - let span = end_sp.with_lo(start_sp.lo()); + let span = token.span.with_lo(start_sp.lo()); result.push(TokenTree::MetaVarDecl(span, ident, kind)); continue; } - _ => end_sp, + _ => token.span, }, tree => tree .as_ref() @@ -270,7 +275,7 @@ where // Depending on what `tree` is, we could be parsing different parts of a macro match tree { // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() { // `tree` is followed by a delimited set of token trees. This indicates the beginning // of a repetition sequence in the macro (e.g. `$(pat)*`). Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => { @@ -316,33 +321,33 @@ where // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special // metavariable that names the crate of the invocation. - Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { + Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); - let span = ident_span.with_lo(span.lo()); + let span = token.span.with_lo(span.lo()); if ident.name == kw::Crate && !is_raw { let ident = ast::Ident::new(kw::DollarCrate, ident.span); - TokenTree::Token(span, token::Ident(ident, is_raw)) + TokenTree::token(span, token::Ident(ident, is_raw)) } else { TokenTree::MetaVar(span, ident) } } // `tree` is followed by a random token. This is an error. - Some(tokenstream::TokenTree::Token(span, tok)) => { + Some(tokenstream::TokenTree::Token(token)) => { let msg = format!( "expected identifier, found `{}`", - pprust::token_to_string(&tok) + pprust::token_to_string(&token), ); - sess.span_diagnostic.span_err(span, &msg); - TokenTree::MetaVar(span, ast::Ident::invalid()) + sess.span_diagnostic.span_err(token.span, &msg); + TokenTree::MetaVar(token.span, ast::Ident::invalid()) } // There are no more tokens. Just return the `$` we already have. 
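// The `TokenTree::token` helper added above is what makes the rest of
// the commit mechanical: every `TokenTree::Token(span, kind)` call site
// becomes `TokenTree::token(span, kind)`, which bundles the pair into a
// `Token` once, at construction. Stand-alone sketch with stand-in types:
struct Span;
enum TokenKind { Dollar, Comma }
struct Token { kind: TokenKind, span: Span }
enum TokenTree { Token(Token) }
impl TokenTree {
    fn token(span: Span, kind: TokenKind) -> TokenTree {
        TokenTree::Token(Token { kind, span })
    }
}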
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(span, token::Dollar),
         },
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
         },
         tree => Err(tree
             .as_ref()
@@ -466,7 +468,7 @@ where
         assert_eq!(op, KleeneOp::ZeroOrOne);
 
         // Lookahead at #2. If it is a KleeneOp, then #1 is a separator.
-        let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+        let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
             kleene_op(tok2).is_some()
         } else {
             false
@@ -504,7 +506,7 @@ where
                 }
 
                 // #2 is a random token (this is an error) :(
-                Ok(Err((_, _))) => op1_span,
+                Ok(Err(_)) => op1_span,
 
                 // #2 is not even a token at all :(
                 Err(_) => op1_span,
@@ -524,7 +526,7 @@ where
             }
 
             // #1 is a separator followed by #2, a KleeneOp
-            Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+            Ok(Err(token)) => match parse_kleene_op(input, token.span) {
                 // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
                 // but is allowed in the 2018 edition
                 Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
@@ -539,10 +541,10 @@ where
                 }
 
                 // #2 is a KleeneOp :D
-                Ok(Ok((op, _))) => return (Some(tok), op),
+                Ok(Ok((op, _))) => return (Some(token.kind), op),
 
                 // #2 is a random token :(
-                Ok(Err((_, span))) => span,
+                Ok(Err(token)) => token.span,
 
                 // #2 is not a token at all :(
                 Err(span) => span,
@@ -580,12 +582,12 @@ where
             Ok(Ok((op, _))) => return (None, op),
 
             // #1 is a separator followed by #2, a KleeneOp
-            Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+            Ok(Err(token)) => match parse_kleene_op(input, token.span) {
                 // #2 is the `?` Kleene op, which does not take a separator (error)
                 Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                     // Error!
sess.span_diagnostic.span_err( - span, + token.span, "the `?` macro repetition operator does not take a separator", ); @@ -594,10 +596,10 @@ where } // #2 is a KleeneOp :D - Ok(Ok((op, _))) => return (Some(tok), op), + Ok(Ok((op, _))) => return (Some(token.kind), op), // #2 is a random token :( - Ok(Err((_, span))) => span, + Ok(Err(token)) => token.span, // #2 is not a token at all :( Err(span) => span, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 1b169d7696af3..1dbb0638df195 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -119,7 +119,7 @@ pub fn transcribe( Some((tt, _)) => tt.span(), None => DUMMY_SP, }; - result.push(TokenTree::Token(prev_span, sep).into()); + result.push(TokenTree::token(prev_span, sep).into()); } continue; } @@ -225,7 +225,7 @@ pub fn transcribe( result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); + let token = TokenTree::token(sp, token::Interpolated(nt.clone())); result.push(token.into()); } } else { @@ -241,8 +241,8 @@ pub fn transcribe( let ident = Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark)); sp = sp.apply_mark(cx.current_expansion.mark); - result.push(TokenTree::Token(sp, token::Dollar).into()); - result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into()); + result.push(TokenTree::token(sp, token::Dollar).into()); + result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into()); } } @@ -259,9 +259,9 @@ pub fn transcribe( // Nothing much to do here. Just push the token to the result, being careful to // preserve syntax context. - quoted::TokenTree::Token(sp, tok) => { + quoted::TokenTree::Token(token) => { let mut marker = Marker(cx.current_expansion.mark); - let mut tt = TokenTree::Token(sp, tok); + let mut tt = TokenTree::Token(token); noop_visit_tt(&mut tt, &mut marker); result.push(tt.into()); } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 4a95b6f69a161..64415204047ba 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1958,9 +1958,11 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { name, template ), - None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() { - // All key-value attributes are restricted to meta-item syntax. - attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok(); + None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() { + if token == token::Eq { + // All key-value attributes are restricted to meta-item syntax. 
+ attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok(); + } } } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 4229121b3d075..6882586ed2cd2 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -10,6 +10,7 @@ #![deny(rust_2018_idioms)] #![deny(internal)] +#![feature(bind_by_move_pattern_guards)] #![feature(crate_visibility_modifier)] #![feature(label_break_value)] #![feature(nll)] diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 289f2c0ce4864..ad6d3f71c652e 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -9,7 +9,7 @@ use crate::ast::*; use crate::source_map::{Spanned, respan}; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, Token, TokenKind}; use crate::ptr::P; use crate::ThinVec; use crate::tokenstream::*; @@ -576,9 +576,9 @@ pub fn noop_visit_arg(Arg { id, pat, ty }: &mut Arg, vis: &mut T) pub fn noop_visit_tt(tt: &mut TokenTree, vis: &mut T) { match tt { - TokenTree::Token(span, tok) => { + TokenTree::Token(Token { kind, span }) => { + vis.visit_token(kind); vis.visit_span(span); - vis.visit_token(tok); } TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => { vis.visit_span(open); diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index e99a86e807f7f..9b78b56041f21 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -157,7 +157,7 @@ impl<'a> Parser<'a> { self.check(&token::OpenDelim(DelimToken::Brace)) { self.parse_token_tree().into() } else if self.eat(&token::Eq) { - let eq = TokenTree::Token(self.prev_span, token::Eq); + let eq = TokenTree::token(self.prev_span, token::Eq); let mut is_interpolated_expr = false; if let token::Interpolated(nt) = &self.token { if let token::NtExpr(..) 
= **nt { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 32d5b16dd714f..225db0164fe65 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1596,8 +1596,8 @@ mod tests { "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); let id = Ident::from_str("fn"); - assert_eq!(string_reader.next_token().kind, token::Comment); - assert_eq!(string_reader.next_token().kind, token::Whitespace); + assert_eq!(string_reader.next_token(), token::Comment); + assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); let tok2 = Token { kind: token::Ident(id, false), @@ -1605,7 +1605,7 @@ mod tests { }; assert_eq!(tok1.kind, tok2.kind); assert_eq!(tok1.span, tok2.span); - assert_eq!(string_reader.next_token().kind, token::Whitespace); + assert_eq!(string_reader.next_token(), token::Whitespace); // the 'main' id is already read: assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: @@ -1625,7 +1625,7 @@ mod tests { // of tokens (stop checking after exhausting the expected vec) fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec) { for expected_tok in &expected { - assert_eq!(&string_reader.next_token().kind, expected_tok); + assert_eq!(&string_reader.next_token(), expected_tok); } } @@ -1683,7 +1683,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), mk_lit(token::Char, "a", None)); }) } @@ -1693,7 +1693,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), mk_lit(token::Char, " ", None)); }) } @@ -1703,7 +1703,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(), mk_lit(token::Char, "\\n", None)); }) } @@ -1713,7 +1713,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(), token::Lifetime(Ident::from_str("'abc"))); }) } @@ -1723,7 +1723,7 @@ mod tests { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(), mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None)); }) } @@ -1735,10 +1735,10 @@ mod tests { let sh = mk_sess(sm.clone()); macro_rules! 
test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind, + assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(), mk_lit(token::$tok_type, $tok_contents, Some("suffix"))); // with a whitespace separator: - assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind, + assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(), mk_lit(token::$tok_type, $tok_contents, None)); }} } @@ -1753,11 +1753,11 @@ mod tests { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(), mk_lit(token::Integer, "2", Some("us"))); - assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(), mk_lit(token::StrRaw(3), "raw", Some("suffix"))); - assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind, + assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(), mk_lit(token::ByteStrRaw(3), "raw", Some("suffix"))); }) } @@ -1775,11 +1775,8 @@ mod tests { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string()); - match lexer.next_token().kind { - token::Comment => {} - _ => panic!("expected a comment!"), - } - assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None)); + assert_eq!(lexer.next_token(), token::Comment); + assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None)); }) } @@ -1792,9 +1789,8 @@ mod tests { let comment = lexer.next_token(); assert_eq!(comment.kind, token::Comment); assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7))); - assert_eq!(lexer.next_token().kind, token::Whitespace); - assert_eq!(lexer.next_token().kind, - token::DocComment(Symbol::intern("/// test"))); + assert_eq!(lexer.next_token(), token::Whitespace); + assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test"))); }) } } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 767d37016da87..abff7177abd13 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -203,7 +203,7 @@ impl<'a> TokenTreesReader<'a> { Err(err) }, _ => { - let tt = TokenTree::Token(self.span, self.token.clone()); + let tt = TokenTree::token(self.span, self.token.clone()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. 
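The change applied across all of these hunks is the same mechanical one: the two-field
`TokenTree::Token(Span, TokenKind)` variant becomes a single-field `TokenTree::Token(Token)`,
where `Token` bundles `kind` and `span`, and the new `TokenTree::token(span, kind)` constructor
keeps each call site a one-line change. A minimal self-contained sketch of that shape follows;
the `Span` and `TokenKind` definitions below are stand-ins for the much richer libsyntax types,
and only the `Token` variant is modeled.

    // Sketch only. `Span` and `TokenKind` are stand-ins for the libsyntax types.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32, u32);

    #[allow(dead_code)]
    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind {
        Dollar,
        Comma,
        Ident(String),
    }

    #[derive(Clone, Debug, PartialEq)]
    struct Token {
        kind: TokenKind,
        span: Span,
    }

    #[derive(Clone, Debug, PartialEq)]
    enum TokenTree {
        // Was `Token(Span, TokenKind)`; now a single field.
        Token(Token),
    }

    impl TokenTree {
        // The constructor these patches introduce: it keeps the old
        // (span, kind) argument order, so call sites change minimally.
        fn token(span: Span, kind: TokenKind) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }

        // Span access now reads through the embedded token.
        fn span(&self) -> Span {
            match self {
                TokenTree::Token(token) => token.span,
            }
        }
    }

    fn main() {
        let tt = TokenTree::token(Span(0, 1), TokenKind::Dollar);
        assert_eq!(tt.span(), Span(0, 1));

        // Matching destructures the struct instead of a tuple:
        if let TokenTree::Token(Token { kind: TokenKind::Dollar, span }) = tt {
            println!("`$` at {:?}", span);
        }
    }

Pattern matches change in the same way throughout the series: `TokenTree::Token(_, token::Comma)`
becomes `TokenTree::Token(Token { kind: token::Comma, .. })`, as in the hunks above and below.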
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 945475ff9818b..4b8ef20180f63 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -261,7 +261,7 @@ impl Lit { token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false), _ => token::Literal(self.token), }; - TokenTree::Token(self.span, token).into() + TokenTree::token(self.span, token).into() } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 7f8b96508bdd9..398b4b1da17b0 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -385,6 +385,7 @@ mod tests { use crate::ast::{self, Ident, PatKind}; use crate::attr::first_attr_value_str_by_name; use crate::ptr::P; + use crate::parse::token::Token; use crate::print::pprust::item_to_string; use crate::tokenstream::{DelimSpan, TokenTree}; use crate::util::parser_testing::string_to_stream; @@ -426,9 +427,9 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))), - Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip, false))), + Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })), + Some(&TokenTree::Token(Token { kind: token::Not, .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })), Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)), ) if name_macro_rules.name == sym::macro_rules @@ -438,7 +439,7 @@ mod tests { ( 3, Some(&TokenTree::Delimited(_, first_delim, ref first_tts)), - Some(&TokenTree::Token(_, token::FatArrow)), + Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })), Some(&TokenTree::Delimited(_, second_delim, ref second_tts)), ) if macro_delim == token::Paren => { @@ -446,8 +447,8 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, false))), + Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })), ) if first_delim == token::Paren && ident.name.as_str() == "a" => {}, _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), @@ -456,8 +457,8 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1)) { ( 2, - Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, false))), + Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. 
})), ) if second_delim == token::Paren && ident.name.as_str() == "a" => {}, _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), @@ -477,16 +478,16 @@ mod tests { let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), - TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), + TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), + TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), TokenTree::Delimited( DelimSpan::from_pair(sp(5, 6), sp(13, 14)), token::DelimToken::Paren, TokenStream::new(vec![ - TokenTree::Token(sp(6, 7), + TokenTree::token(sp(6, 7), token::Ident(Ident::from_str("b"), false)).into(), - TokenTree::Token(sp(8, 9), token::Colon).into(), - TokenTree::Token(sp(10, 13), + TokenTree::token(sp(8, 9), token::Colon).into(), + TokenTree::token(sp(10, 13), token::Ident(Ident::from_str("i32"), false)).into(), ]).into(), ).into(), @@ -494,9 +495,9 @@ mod tests { DelimSpan::from_pair(sp(15, 16), sp(20, 21)), token::DelimToken::Brace, TokenStream::new(vec![ - TokenTree::Token(sp(17, 18), + TokenTree::token(sp(17, 18), token::Ident(Ident::from_str("b"), false)).into(), - TokenTree::Token(sp(18, 19), token::Semi).into(), + TokenTree::token(sp(18, 19), token::Semi).into(), ]).into(), ).into() ]); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 3b7d4e14dbb40..eda67b3a93d8e 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -318,7 +318,7 @@ impl TokenCursor { } match tree { - TokenTree::Token(span, kind) => return Token { kind, span }, + TokenTree::Token(token) => return token, TokenTree::Delimited(sp, delim, tts) => { let frame = TokenCursorFrame::new(sp, delim, &tts); self.stack.push(mem::replace(&mut self.frame, frame)); @@ -353,9 +353,9 @@ impl TokenCursor { delim_span, token::Bracket, [ - TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), - TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::TokenKind::lit( + TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), + TokenTree::token(sp, token::Eq), + TokenTree::token(sp, token::TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None )), ] @@ -366,10 +366,10 @@ impl TokenCursor { delim_span, token::NoDelim, &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { - [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body] + [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body] .iter().cloned().collect::().into() } else { - [TokenTree::Token(sp, token::Pound), body] + [TokenTree::token(sp, token::Pound), body] .iter().cloned().collect::().into() }, ))); @@ -1052,7 +1052,7 @@ impl<'a> Parser<'a> { f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { - TokenTree::Token(_, tok) => tok, + TokenTree::Token(token) => token.kind, TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), }, None => token::CloseDelim(self.token_cursor.frame.delim), @@ -1065,7 +1065,7 @@ impl<'a> Parser<'a> { } match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { - Some(TokenTree::Token(span, _)) => span, + Some(TokenTree::Token(token)) => token.span, Some(TokenTree::Delimited(span, ..)) => span.entire(), None => self.look_ahead_span(dist - 1), } @@ -2675,7 +2675,7 @@ impl<'a> Parser<'a> { _ => { let 
(token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); self.bump(); - TokenTree::Token(span, token) + TokenTree::token(span, token) } } } @@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> { }; TokenStream::new(vec![ args.into(), - TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(), + TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(), body.into(), ]) } else { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 3679e4050ff42..a06bf9fae7c29 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -18,6 +18,7 @@ use log::info; use std::fmt; use std::mem; +use std::ops::Deref; #[cfg(target_arch = "x86_64")] use rustc_data_structures::static_assert_size; use rustc_data_structures::sync::Lrc; @@ -165,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool { ].contains(&ident.name) } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum TokenKind { /* Expression-operator symbols. */ Eq, @@ -235,7 +236,7 @@ pub enum TokenKind { #[cfg(target_arch = "x86_64")] static_assert_size!(TokenKind, 16); -#[derive(Clone, Debug)] +#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub struct Token { pub kind: TokenKind, pub span: Span, @@ -614,6 +615,14 @@ impl PartialEq for Token { } } +// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`. +impl Deref for Token { + type Target = TokenKind; + fn deref(&self) -> &Self::Target { + &self.kind + } +} + #[derive(Clone, RustcEncodable, RustcDecodable)] /// For interpolation during macro expansion. pub enum Nonterminal { @@ -704,11 +713,11 @@ impl Nonterminal { } Nonterminal::NtIdent(ident, is_raw) => { let token = Ident(ident, is_raw); - Some(TokenTree::Token(ident.span, token).into()) + Some(TokenTree::token(ident.span, token).into()) } Nonterminal::NtLifetime(ident) => { let token = Lifetime(ident); - Some(TokenTree::Token(ident.span, token).into()) + Some(TokenTree::token(ident.span, token).into()) } Nonterminal::NtTT(ref tt) => { Some(tt.clone().into()) @@ -794,7 +803,7 @@ fn prepend_attrs(sess: &ParseSess, if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() { let ident = attr.path.segments[0].ident; let token = Ident(ident, ident.as_str().starts_with("r#")); - brackets.push(tokenstream::TokenTree::Token(ident.span, token)); + brackets.push(tokenstream::TokenTree::token(ident.span, token)); // ... and for more complicated paths, fall back to a reparse hack that // should eventually be removed. @@ -808,7 +817,7 @@ fn prepend_attrs(sess: &ParseSess, // The span we list here for `#` and for `[ ... ]` are both wrong in // that it encompasses more than each token, but it hopefully is "good // enough" for now at least. - builder.push(tokenstream::TokenTree::Token(attr.span, Pound)); + builder.push(tokenstream::TokenTree::token(attr.span, Pound)); let delim_span = DelimSpan::from_single(attr.span); builder.push(tokenstream::TokenTree::Delimited( delim_span, DelimToken::Bracket, brackets.build().into())); diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index cd7106191bee2..07acfb5dc86c3 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -724,10 +724,10 @@ pub trait PrintState<'a> { /// expression arguments as expressions). It can be done! I think. 
fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { match tt { - TokenTree::Token(_, ref tk) => { - self.writer().word(token_to_string(tk))?; - match *tk { - parse::token::DocComment(..) => { + TokenTree::Token(ref token) => { + self.writer().word(token_to_string(&token))?; + match token.kind { + token::DocComment(..) => { self.writer().hardbreak() } _ => Ok(()) diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 654c21fd094e9..e6fe33d6ccf26 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -16,7 +16,7 @@ use crate::ext::base; use crate::ext::tt::{macro_parser, quoted}; use crate::parse::Directory; -use crate::parse::token::{self, DelimToken, TokenKind}; +use crate::parse::token::{self, DelimToken, Token, TokenKind}; use crate::print::pprust; use syntax_pos::{BytePos, Mark, Span, DUMMY_SP}; @@ -44,7 +44,7 @@ use std::{fmt, iter, mem}; #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { /// A single token - Token(Span, token::TokenKind), + Token(Token), /// A delimited sequence of token trees Delimited(DelimSpan, DelimToken, TokenStream), } @@ -53,8 +53,7 @@ pub enum TokenTree { #[cfg(parallel_compiler)] fn _dummy() where - Span: Send + Sync, - token::TokenKind: Send + Sync, + Token: Send + Sync, DelimSpan: Send + Sync, DelimToken: Send + Sync, TokenStream: Send + Sync, @@ -86,12 +85,11 @@ impl TokenTree { /// Checks if this TokenTree is equal to the other, regardless of span information. pub fn eq_unspanned(&self, other: &TokenTree) -> bool { match (self, other) { - (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, - (&TokenTree::Delimited(_, delim, ref tts), - &TokenTree::Delimited(_, delim2, ref tts2)) => { + (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind, + (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => { delim == delim2 && tts.eq_unspanned(&tts2) } - (_, _) => false, + _ => false, } } @@ -102,37 +100,36 @@ impl TokenTree { // different method. pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool { match (self, other) { - (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => { - tk.probably_equal_for_proc_macro(tk2) + (TokenTree::Token(token), TokenTree::Token(token2)) => { + token.probably_equal_for_proc_macro(token2) } - (&TokenTree::Delimited(_, delim, ref tts), - &TokenTree::Delimited(_, delim2, ref tts2)) => { + (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => { delim == delim2 && tts.probably_equal_for_proc_macro(&tts2) } - (_, _) => false, + _ => false, } } /// Retrieves the TokenTree's span. pub fn span(&self) -> Span { - match *self { - TokenTree::Token(sp, _) => sp, + match self { + TokenTree::Token(token) => token.span, TokenTree::Delimited(sp, ..) => sp.entire(), } } /// Modify the `TokenTree`'s span in-place. pub fn set_span(&mut self, span: Span) { - match *self { - TokenTree::Token(ref mut sp, _) => *sp = span, - TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span), + match self { + TokenTree::Token(token) => token.span = span, + TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span), } } /// Indicates if the stream is a token that is equal to the provided token. 
pub fn eq_token(&self, t: TokenKind) -> bool { - match *self { - TokenTree::Token(_, ref tk) => *tk == t, + match self { + TokenTree::Token(token) => *token == t, _ => false, } } @@ -141,6 +138,10 @@ impl TokenTree { TokenStream::new(vec![(self, Joint)]) } + pub fn token(span: Span, kind: TokenKind) -> TokenTree { + TokenTree::Token(Token { kind, span }) + } + /// Returns the opening delimiter as a token tree. pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree { let open_span = if span.is_dummy() { @@ -148,7 +149,7 @@ impl TokenTree { } else { span.with_hi(span.lo() + BytePos(delim.len() as u32)) }; - TokenTree::Token(open_span, token::OpenDelim(delim)) + TokenTree::token(open_span, token::OpenDelim(delim)) } /// Returns the closing delimiter as a token tree. @@ -158,7 +159,7 @@ impl TokenTree { } else { span.with_lo(span.hi() - BytePos(delim.len() as u32)) }; - TokenTree::Token(close_span, token::CloseDelim(delim)) + TokenTree::token(close_span, token::CloseDelim(delim)) } } @@ -201,18 +202,17 @@ impl TokenStream { while let Some((pos, ts)) = iter.next() { if let Some((_, next)) = iter.peek() { let sp = match (&ts, &next) { - (_, (TokenTree::Token(_, token::Comma), _)) => continue, - ((TokenTree::Token(sp, token_left), NonJoint), - (TokenTree::Token(_, token_right), _)) + (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue, + ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _)) if ((token_left.is_ident() && !token_left.is_reserved_ident()) || token_left.is_lit()) && ((token_right.is_ident() && !token_right.is_reserved_ident()) - || token_right.is_lit()) => *sp, + || token_right.is_lit()) => token_left.span, ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(), _ => continue, }; let sp = sp.shrink_to_hi(); - let comma = (TokenTree::Token(sp, token::Comma), NonJoint); + let comma = (TokenTree::token(sp, token::Comma), NonJoint); suggestion = Some((pos, comma, sp)); } } @@ -241,12 +241,6 @@ impl From for TreeAndJoint { } } -impl From for TokenStream { - fn from(token: TokenKind) -> TokenStream { - TokenTree::Token(DUMMY_SP, token).into() - } -} - impl> iter::FromIterator for TokenStream { fn from_iter>(iter: I) -> Self { TokenStream::from_streams(iter.into_iter().map(Into::into).collect::>()) @@ -349,22 +343,25 @@ impl TokenStream { // streams, making a comparison between a token stream generated from an // AST and a token stream which was parsed into an AST more reliable. fn semantic_tree(tree: &TokenTree) -> bool { - match tree { - // The pretty printer tends to add trailing commas to - // everything, and in particular, after struct fields. - | TokenTree::Token(_, token::Comma) - // The pretty printer emits `NoDelim` as whitespace. - | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim)) - | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim)) - // The pretty printer collapses many semicolons into one. - | TokenTree::Token(_, token::Semi) - // The pretty printer collapses whitespace arbitrarily and can - // introduce whitespace from `NoDelim`. - | TokenTree::Token(_, token::Whitespace) - // The pretty printer can turn `$crate` into `::crate_name` - | TokenTree::Token(_, token::ModSep) => false, - _ => true + if let TokenTree::Token(token) = tree { + if let + // The pretty printer tends to add trailing commas to + // everything, and in particular, after struct fields. + | token::Comma + // The pretty printer emits `NoDelim` as whitespace. 
+ | token::OpenDelim(DelimToken::NoDelim) + | token::CloseDelim(DelimToken::NoDelim) + // The pretty printer collapses many semicolons into one. + | token::Semi + // The pretty printer collapses whitespace arbitrarily and can + // introduce whitespace from `NoDelim`. + | token::Whitespace + // The pretty printer can turn `$crate` into `::crate_name` + | token::ModSep = token.kind { + return false; + } } + true } let mut t1 = self.trees().filter(semantic_tree); @@ -430,13 +427,13 @@ impl TokenStreamBuilder { pub fn push>(&mut self, stream: T) { let stream = stream.into(); let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); - if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint { - if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() { - if let Some(glued_tok) = last_tok.glue(tok) { + if let Some(TokenTree::Token(last_token)) = last_tree_if_joint { + if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() { + if let Some(glued_tok) = last_token.kind.glue(token.kind) { let last_stream = self.0.pop().unwrap(); self.push_all_but_last_tree(&last_stream); - let glued_span = last_span.to(span); - let glued_tt = TokenTree::Token(glued_span, glued_tok); + let glued_span = last_token.span.to(token.span); + let glued_tt = TokenTree::token(glued_span, glued_tok); let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]); self.0.push(glued_tokenstream); self.push_all_but_first_tree(&stream); @@ -663,7 +660,7 @@ mod tests { with_default_globals(|| { let test0: TokenStream = Vec::::new().into_iter().collect(); let test1: TokenStream = - TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into(); + TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into(); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); @@ -676,9 +673,9 @@ mod tests { fn test_dotdotdot() { with_default_globals(|| { let mut builder = TokenStreamBuilder::new(); - builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint()); - builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint()); - builder.push(TokenTree::Token(sp(2, 3), token::Dot)); + builder.push(TokenTree::token(sp(0, 1), token::Dot).joint()); + builder.push(TokenTree::token(sp(1, 2), token::Dot).joint()); + builder.push(TokenTree::token(sp(2, 3), token::Dot)); let stream = builder.build(); assert!(stream.eq_unspanned(&string_to_ts("..."))); assert_eq!(stream.trees().count(), 1); diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 35f70092be432..e32c5f3f3ecad 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) { match tt { - TokenTree::Token(_, tok) => visitor.visit_token(tok), + TokenTree::Token(token) => visitor.visit_token(token.kind), TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts), } } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 4d7083c1a790b..83c4c809de372 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -9,7 +9,8 @@ use errors::DiagnosticBuilder; use syntax::ast; use syntax::ext::base::{self, *}; use syntax::feature_gate; -use syntax::parse::{self, token}; +use syntax::parse; +use syntax::parse::token::{self, Token}; use syntax::ptr::P; use syntax::symbol::{kw, sym, Symbol}; use syntax::ast::AsmDialect; @@ -86,8 +87,8 @@ fn 
parse_inline_asm<'a>( let first_colon = tts.iter() .position(|tt| { match *tt { - tokenstream::TokenTree::Token(_, token::Colon) | - tokenstream::TokenTree::Token(_, token::ModSep) => true, + tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) | + tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true, _ => false, } }) diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index 29dd445e75168..8a297a5c9bc19 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -29,7 +29,7 @@ pub fn expand_assert<'cx>( let panic_call = Mac_ { path: Path::from_ident(Ident::new(sym::panic, sp)), tts: custom_message.unwrap_or_else(|| { - TokenStream::from(TokenTree::Token( + TokenStream::from(TokenTree::token( DUMMY_SP, TokenKind::lit(token::Str, Symbol::intern(&format!( "assertion failed: {}", diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 77c53f402cc9f..59f25af374276 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -3,7 +3,7 @@ use rustc_data_structures::thin_vec::ThinVec; use syntax::ast; use syntax::ext::base::{self, *}; use syntax::feature_gate; -use syntax::parse::token; +use syntax::parse::token::{self, Token}; use syntax::ptr::P; use syntax_pos::Span; use syntax_pos::symbol::{Symbol, sym}; @@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>, for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { - TokenTree::Token(_, token::Comma) => {} + TokenTree::Token(Token { kind: token::Comma, .. }) => {} _ => { cx.span_err(sp, "concat_idents! expecting comma."); return DummyResult::any(sp); @@ -38,7 +38,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>, } } else { match *e { - TokenTree::Token(_, token::Ident(ident, _)) => + TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) => res_str.push_str(&ident.as_str()), _ => { cx.span_err(sp, "concat_idents! requires ident args."); diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index c1d93805a5811..3deab97db88c0 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive { MarkAttrs(&self.attrs).visit_item(&item); let token = token::Interpolated(Lrc::new(token::NtItem(item))); - let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into(); + let input = tokenstream::TokenTree::token(DUMMY_SP, token).into(); let server = proc_macro_server::Rustc::new(ecx); let stream = match self.client.run(&EXEC_STRATEGY, server, input) { diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 119b83b7527b4..26eb9e9d4fc1f 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -55,7 +55,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> use syntax::parse::token::*; let joint = is_joint == Joint; - let (span, token) = match tree { + let Token { kind, span } = match tree { tokenstream::TokenTree::Delimited(span, delim, tts) => { let delimiter = Delimiter::from_internal(delim); return TokenTree::Group(Group { @@ -64,7 +64,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> span, }); } - tokenstream::TokenTree::Token(span, token) => (span, token), + tokenstream::TokenTree::Token(token) => token, }; macro_rules! 
tt { @@ -93,7 +93,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> }}; } - match token { + match kind { Eq => op!('='), Lt => op!('<'), Le => op!('<', '='), @@ -164,7 +164,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> TokenKind::lit(token::Str, Symbol::intern(&escaped), None), ] .into_iter() - .map(|token| tokenstream::TokenTree::Token(span, token)) + .map(|kind| tokenstream::TokenTree::token(span, kind)) .collect(); stack.push(TokenTree::Group(Group { delimiter: Delimiter::Bracket, @@ -212,7 +212,7 @@ impl ToInternal for TokenTree { } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { let token = Ident(ast::Ident::new(sym, span), is_raw); - return tokenstream::TokenTree::Token(span, token).into(); + return tokenstream::TokenTree::token(span, token).into(); } TokenTree::Literal(self::Literal { lit: token::Lit { kind: token::Integer, symbol, suffix }, @@ -221,8 +221,8 @@ impl ToInternal for TokenTree { let minus = BinOp(BinOpToken::Minus); let symbol = Symbol::intern(&symbol.as_str()[1..]); let integer = TokenKind::lit(token::Integer, symbol, suffix); - let a = tokenstream::TokenTree::Token(span, minus); - let b = tokenstream::TokenTree::Token(span, integer); + let a = tokenstream::TokenTree::token(span, minus); + let b = tokenstream::TokenTree::token(span, integer); return vec![a, b].into_iter().collect(); } TokenTree::Literal(self::Literal { @@ -232,16 +232,16 @@ impl ToInternal for TokenTree { let minus = BinOp(BinOpToken::Minus); let symbol = Symbol::intern(&symbol.as_str()[1..]); let float = TokenKind::lit(token::Float, symbol, suffix); - let a = tokenstream::TokenTree::Token(span, minus); - let b = tokenstream::TokenTree::Token(span, float); + let a = tokenstream::TokenTree::token(span, minus); + let b = tokenstream::TokenTree::token(span, float); return vec![a, b].into_iter().collect(); } TokenTree::Literal(self::Literal { lit, span }) => { - return tokenstream::TokenTree::Token(span, Literal(lit)).into() + return tokenstream::TokenTree::token(span, Literal(lit)).into() } }; - let token = match ch { + let kind = match ch { '=' => Eq, '<' => Lt, '>' => Gt, @@ -267,7 +267,7 @@ impl ToInternal for TokenTree { _ => unreachable!(), }; - let tree = tokenstream::TokenTree::Token(span, token); + let tree = tokenstream::TokenTree::token(span, kind); TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })]) } } diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 61ef94560ccb9..6c74f77ff1fb5 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>, } match (tt.len(), tt.first()) { - (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => { + (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => { cx.set_trace_macros(true); } - (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => { + (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! 
accepts only `true` or `false`"), From c0c57acd7b8061697d196fd800a7ff3151c37f38 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 00:02:59 +0300 Subject: [PATCH 05/14] syntax: Use `Token` in `StringReader` and `TokenTreesReader` --- src/librustdoc/html/highlight.rs | 10 +++--- src/libsyntax/parse/lexer/mod.rs | 35 +++++++------------- src/libsyntax/parse/lexer/tokentrees.rs | 43 ++++++++++++------------- 3 files changed, 37 insertions(+), 51 deletions(-) diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index d68741233754b..bc6eaaaa8b9f0 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -234,7 +234,7 @@ impl<'a> Classifier<'a> { // reference or dereference operator or a reference or pointer type, instead of the // bit-and or multiplication operator. token::BinOp(token::And) | token::BinOp(token::Star) - if self.lexer.peek() != token::Whitespace => Class::RefKeyWord, + if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord, // Consider this as part of a macro invocation if there was a // leading identifier. @@ -280,9 +280,9 @@ impl<'a> Classifier<'a> { // as an attribute. // Case 1: #![inner_attribute] - if self.lexer.peek() == token::Not { + if self.lexer.peek() == &token::Not { self.try_next_token()?; // NOTE: consumes `!` token! - if self.lexer.peek() == token::OpenDelim(token::Bracket) { + if self.lexer.peek() == &token::OpenDelim(token::Bracket) { self.in_attribute = true; out.enter_span(Class::Attribute)?; } @@ -292,7 +292,7 @@ impl<'a> Classifier<'a> { } // Case 2: #[outer_attribute] - if self.lexer.peek() == token::OpenDelim(token::Bracket) { + if self.lexer.peek() == &token::OpenDelim(token::Bracket) { self.in_attribute = true; out.enter_span(Class::Attribute)?; } @@ -341,7 +341,7 @@ impl<'a> Classifier<'a> { if self.in_macro_nonterminal { self.in_macro_nonterminal = false; Class::MacroNonTerminal - } else if self.lexer.peek() == token::Not { + } else if self.lexer.peek() == &token::Not { self.in_macro = true; Class::Macro } else { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 225db0164fe65..9dba5ff3e8c84 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -12,7 +12,6 @@ use core::unicode::property::Pattern_White_Space; use std::borrow::Cow; use std::char; use std::iter; -use std::mem::replace; use rustc_data_structures::sync::Lrc; use log::debug; @@ -41,8 +40,7 @@ pub struct StringReader<'a> { /// Stop reading src at this index. crate end_src_index: usize, // cached: - peek_tok: TokenKind, - peek_span: Span, + peek_token: Token, peek_span_src_raw: Span, fatal_errs: Vec>, // cache a direct reference to the source text, so that we don't have to @@ -90,10 +88,7 @@ impl<'a> StringReader<'a> { /// Returns the next token. EFFECT: advances the string_reader. pub fn try_next_token(&mut self) -> Result { assert!(self.fatal_errs.is_empty()); - let ret_val = Token { - kind: replace(&mut self.peek_tok, token::Whitespace), - span: self.peek_span, - }; + let ret_val = self.peek_token.clone(); self.advance_token()?; Ok(ret_val) } @@ -158,7 +153,7 @@ impl<'a> StringReader<'a> { } fn fatal(&self, m: &str) -> FatalError { - self.fatal_span(self.peek_span, m) + self.fatal_span(self.peek_token.span, m) } crate fn emit_fatal_errors(&mut self) { @@ -179,12 +174,8 @@ impl<'a> StringReader<'a> { buffer } - pub fn peek(&self) -> Token { - // FIXME(pcwalton): Bad copy! 
- Token { - kind: self.peek_tok.clone(), - span: self.peek_span, - } + pub fn peek(&self) -> &Token { + &self.peek_token } /// For comments.rs, which hackily pokes into next_pos and ch @@ -215,8 +206,7 @@ impl<'a> StringReader<'a> { source_file, end_src_index: src.len(), // dummy values; not read - peek_tok: token::Eof, - peek_span: syntax_pos::DUMMY_SP, + peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), @@ -321,29 +311,28 @@ impl<'a> StringReader<'a> { self.err_span_(from_pos, to_pos, &m[..]); } - /// Advance peek_tok and peek_span to refer to the next token, and + /// Advance peek_token to refer to the next token, and /// possibly update the interner. fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { self.peek_span_src_raw = comment.span; - self.peek_span = comment.span; - self.peek_tok = comment.kind; + self.peek_token = comment; } None => { if self.is_eof() { - self.peek_tok = token::Eof; + let (real, raw) = self.mk_sp_and_raw( self.source_file.end_pos, self.source_file.end_pos, ); - self.peek_span = real; + self.peek_token = Token { kind: token::Eof, span: real }; self.peek_span_src_raw = raw; } else { let start_bytepos = self.pos; - self.peek_tok = self.next_token_inner()?; + let kind = self.next_token_inner()?; let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos); - self.peek_span = real; + self.peek_token = Token { kind, span: real }; self.peek_span_src_raw = raw; }; } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index abff7177abd13..0dab441c96f17 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -2,15 +2,15 @@ use syntax_pos::Span; use crate::print::pprust::token_to_string; use crate::parse::lexer::{StringReader, UnmatchedBrace}; -use crate::parse::{token, PResult}; +use crate::parse::token::{self, Token}; +use crate::parse::PResult; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { let mut tt_reader = TokenTreesReader { string_reader: self, - token: token::Eof, - span: syntax_pos::DUMMY_SP, + token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, open_braces: Vec::new(), unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), @@ -23,8 +23,7 @@ impl<'a> StringReader<'a> { struct TokenTreesReader<'a> { string_reader: StringReader<'a>, - token: token::TokenKind, - span: Span, + token: Token, /// Stack of open delimiters and their spans. Used for error message. open_braces: Vec<(token::DelimToken, Span)>, unmatched_braces: Vec, @@ -52,7 +51,7 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { let mut tts = vec![]; loop { - if let token::CloseDelim(..) = self.token { + if let token::CloseDelim(..) 
= self.token.kind { return TokenStream::new(tts); } @@ -68,11 +67,11 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { let sm = self.string_reader.sess.source_map(); - match self.token { + match self.token.kind { token::Eof => { let msg = "this file contains an un-closed delimiter"; let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, msg); + .struct_span_err(self.token.span, msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); } @@ -102,10 +101,10 @@ impl<'a> TokenTreesReader<'a> { }, token::OpenDelim(delim) => { // The span for beginning of the delimited section - let pre_span = self.span; + let pre_span = self.token.span; // Parse the open delimiter. - self.open_braces.push((delim, self.span)); + self.open_braces.push((delim, self.token.span)); self.real_token(); // Parse the token trees within the delimiters. @@ -114,9 +113,9 @@ impl<'a> TokenTreesReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let delim_span = DelimSpan::from_pair(pre_span, self.span); + let delim_span = DelimSpan::from_pair(pre_span, self.token.span); - match self.token { + match self.token.kind { // Correct delimiter. token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); @@ -126,7 +125,7 @@ impl<'a> TokenTreesReader<'a> { self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.span), + (open_brace, open_brace_span, self.token.span), ); } // Parse the close delimiter. @@ -136,16 +135,16 @@ impl<'a> TokenTreesReader<'a> { token::CloseDelim(other) => { let mut unclosed_delimiter = None; let mut candidate = None; - if self.last_unclosed_found_span != Some(self.span) { + if self.last_unclosed_found_span != Some(self.token.span) { // do not complain about the same unclosed delimiter multiple times - self.last_unclosed_found_span = Some(self.span); + self.last_unclosed_found_span = Some(self.token.span); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. 
if let Some(&(_, sp)) = self.open_braces.last() { unclosed_delimiter = Some(sp); }; - if let Some(current_padding) = sm.span_to_margin(self.span) { + if let Some(current_padding) = sm.span_to_margin(self.token.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding @@ -159,7 +158,7 @@ impl<'a> TokenTreesReader<'a> { self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, found_delim: other, - found_span: self.span, + found_span: self.token.span, unclosed_span: unclosed_delimiter, candidate_span: candidate, }); @@ -198,12 +197,12 @@ impl<'a> TokenTreesReader<'a> { let token_str = token_to_string(&self.token); let msg = format!("unexpected close delimiter: `{}`", token_str); let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span, &msg); - err.span_label(self.span, "unexpected close delimiter"); + .struct_span_err(self.token.span, &msg); + err.span_label(self.token.span, "unexpected close delimiter"); Err(err) }, _ => { - let tt = TokenTree::token(self.span, self.token.clone()); + let tt = TokenTree::Token(self.token.clone()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. @@ -219,8 +218,6 @@ impl<'a> TokenTreesReader<'a> { } fn real_token(&mut self) { - let t = self.string_reader.real_token(); - self.token = t.kind; - self.span = t.span; + self.token = self.string_reader.real_token(); } } From aa6fba98ae717d6090cdd5d0569114adfc825680 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 01:17:07 +0300 Subject: [PATCH 06/14] syntax: Use `Token` in `Parser` --- src/libsyntax/ext/tt/macro_parser.rs | 17 ++- src/libsyntax/ext/tt/macro_rules.rs | 16 +-- src/libsyntax/parse/attr.rs | 12 +- src/libsyntax/parse/diagnostics.rs | 12 +- src/libsyntax/parse/literal.rs | 26 ++--- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 161 ++++++++++++++------------- src/libsyntax_ext/asm.rs | 2 +- src/libsyntax_ext/assert.rs | 2 +- src/libsyntax_ext/format.rs | 2 +- 10 files changed, 126 insertions(+), 126 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 6acdffedd6b1a..4f681a77ed350 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -273,7 +273,7 @@ pub enum ParseResult { Success(T), /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected /// end of macro invocation. Otherwise, it indicates that no rules expected the given token. - Failure(syntax_pos::Span, TokenKind, &'static str), + Failure(Token, &'static str), /// Fatal error (malformed macro?). Abort compilation. 
Error(syntax_pos::Span, String), } @@ -701,7 +701,7 @@ pub fn parse( parser.span, ) { Success(_) => {} - Failure(sp, tok, t) => return Failure(sp, tok, t), + Failure(token, msg) => return Failure(token, msg), Error(sp, msg) => return Error(sp, msg), } @@ -727,13 +727,13 @@ pub fn parse( "ambiguity: multiple successful parses".to_string(), ); } else { + let span = if parser.span.is_dummy() { + parser.span + } else { + sess.source_map().next_point(parser.span) + }; return Failure( - if parser.span.is_dummy() { - parser.span - } else { - sess.source_map().next_point(parser.span) - }, - token::Eof, + Token { kind: token::Eof, span }, "missing tokens in macro arguments", ); } @@ -771,7 +771,6 @@ pub fn parse( // then there is a syntax error. else if bb_items.is_empty() && next_items.is_empty() { return Failure( - parser.span, parser.token.clone(), "no rules expected this token in macro call", ); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 703ad0053a0ef..05e921b1bfd1a 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -190,10 +190,10 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, arm_span, }) } - Failure(sp, tok, t) => if sp.lo() >= best_fail_spot.lo() { - best_fail_spot = sp; - best_fail_tok = Some(tok); - best_fail_text = Some(t); + Failure(token, msg) => if token.span.lo() >= best_fail_spot.lo() { + best_fail_spot = token.span; + best_fail_tok = Some(token.kind); + best_fail_text = Some(msg); }, Error(err_sp, ref msg) => { cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) @@ -288,11 +288,11 @@ pub fn compile( let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, - Failure(sp, tok, t) => { - let s = parse_failure_msg(tok); - let sp = sp.substitute_dummy(def.span); + Failure(token, msg) => { + let s = parse_failure_msg(token.kind); + let sp = token.span.substitute_dummy(def.span); let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s); - err.span_label(sp, t); + err.span_label(sp, msg); err.emit(); FatalError.raise(); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 9b78b56041f21..8040168a67ec3 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -24,7 +24,7 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; loop { debug!("parse_outer_attributes: self.token={:?}", self.token); - match self.token { + match self.token.kind { token::Pound => { let inner_error_reason = if just_parsed_doc_comment { "an inner attribute is not permitted following an outer doc comment" @@ -81,7 +81,7 @@ impl<'a> Parser<'a> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, path, tokens, style) = match self.token { + let (span, path, tokens, style) = match self.token.kind { token::Pound => { let lo = self.span; self.bump(); @@ -140,7 +140,7 @@ impl<'a> Parser<'a> { /// PATH `=` TOKEN_TREE /// The delimiters or `=` are still put into the resulting token stream. 
crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { - let meta = match self.token { + let meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, @@ -159,7 +159,7 @@ impl<'a> Parser<'a> { } else if self.eat(&token::Eq) { let eq = TokenTree::token(self.prev_span, token::Eq); let mut is_interpolated_expr = false; - if let token::Interpolated(nt) = &self.token { + if let token::Interpolated(nt) = &self.token.kind { if let token::NtExpr(..) = **nt { is_interpolated_expr = true; } @@ -188,7 +188,7 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec> { let mut attrs: Vec = vec![]; loop { - match self.token { + match self.token.kind { token::Pound => { // Don't even try to parse if it's not an inner attribute. if !self.look_ahead(1, |t| t == &token::Not) { @@ -236,7 +236,7 @@ impl<'a> Parser<'a> { /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { - let nt_meta = match self.token { + let nt_meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), _ => None, diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index b391f7ca327e8..1759a229cf49d 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -201,7 +201,7 @@ impl<'a> Parser<'a> { self.span, &format!("expected identifier, found {}", self.this_token_descr()), ); - if let token::Ident(ident, false) = &self.token { + if let token::Ident(ident, false) = &self.token.kind { if ident.is_raw_guess() { err.span_suggestion( self.span, @@ -730,7 +730,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, bool /* recovered */> { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); - let (prev_sp, sp) = match (&self.token, self.subparser_name) { + let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. 
(token::Eof, Some(_)) => { let sp = self.sess.source_map().next_point(self.span); @@ -746,7 +746,7 @@ impl<'a> Parser<'a> { let msg = format!( "expected `{}`, found {}", token_str, - match (&self.token, self.subparser_name) { + match (&self.token.kind, self.subparser_name) { (token::Eof, Some(origin)) => format!("end of {}", origin), _ => this_token_str, }, @@ -989,7 +989,7 @@ impl<'a> Parser<'a> { break_on_semi, break_on_block); loop { debug!("recover_stmt_ loop {:?}", self.token); - match self.token { + match self.token.kind { token::OpenDelim(token::DelimToken::Brace) => { brace_depth += 1; self.bump(); @@ -1074,7 +1074,7 @@ impl<'a> Parser<'a> { } crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { let mut err = self.diagnostic().struct_span_err( self.span, &format!("documentation comments cannot be applied to {}", applied_to), @@ -1214,7 +1214,7 @@ impl<'a> Parser<'a> { } crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { - let (span, msg) = match (&self.token, self.subparser_name) { + let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.span); (sp, format!("expected expression, found end of {}", origin)) diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 4b8ef20180f63..1abb8254bc6a1 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -3,7 +3,7 @@ use crate::ast::{self, Ident, Lit, LitKind}; use crate::parse::parser::Parser; use crate::parse::PResult; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, Token, TokenKind}; use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; @@ -272,44 +272,42 @@ impl<'a> Parser<'a> { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. 
recovered = self.look_ahead(1, |t| { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind { let next_span = self.look_ahead_span(1); if self.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); - let token = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some((token, self.span.to(next_span))); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + return Some(Token { kind, span: self.span.to(next_span) }); } } None }); - if let Some((ref token, span)) = recovered { + if let Some(token) = &recovered { self.bump(); self.diagnostic() - .struct_span_err(span, "float literals must have an integer part") + .struct_span_err(token.span, "float literals must have an integer part") .span_suggestion( - span, + token.span, "must have an integer part", - pprust::token_to_string(&token), + pprust::token_to_string(token), Applicability::MachineApplicable, ) .emit(); } } - let (token, span) = recovered.as_ref().map_or((&self.token, self.span), - |(token, span)| (token, *span)); - - match Lit::from_token(token, span) { + let token = recovered.as_ref().unwrap_or(&self.token); + match Lit::from_token(token, token.span) { Ok(lit) => { self.bump(); Ok(lit) } Err(LitError::NotLiteral) => { let msg = format!("unexpected token: {}", self.this_token_descr()); - Err(self.span_fatal(span, &msg)) + Err(self.span_fatal(token.span, &msg)) } Err(err) => { - let lit = token.expect_lit(); + let (lit, span) = (token.expect_lit(), token.span); self.bump(); err.report(&self.sess.span_diagnostic, lit, span); let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 398b4b1da17b0..5187621258d0d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -239,7 +239,7 @@ fn maybe_source_file_to_parser( let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { - parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); + parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } Ok(parser) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index eda67b3a93d8e..cc67a3fbd6632 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -57,6 +57,7 @@ use log::debug; use std::borrow::Cow; use std::cmp; use std::mem; +use std::ops::Deref; use std::path::{self, Path, PathBuf}; use std::slice; @@ -121,7 +122,7 @@ crate enum BlockMode { /// `token::Interpolated` tokens. macro_rules! maybe_whole_expr { ($p:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { let e = e.clone(); @@ -147,7 +148,7 @@ macro_rules! maybe_whole_expr { /// As maybe_whole_expr, but for things other than expressions macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { if let token::$constructor(x) = &**nt { let $x = x.clone(); $p.bump(); @@ -161,7 +162,7 @@ macro_rules! maybe_whole { macro_rules! 
maybe_recover_from_interpolated_ty_qpath { ($self: expr, $allow_qpath_recovery: expr) => { if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt) = &$self.token { + if let token::Interpolated(nt) = &$self.token.kind { if let token::NtTy(ty) = &**nt { let ty = ty.clone(); $self.bump(); @@ -196,14 +197,13 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// the current token: - pub token: token::TokenKind, - /// the span of the current token: - pub span: Span, + /// The current token. + pub token: Token, + /// The span of the previous token. meta_var_span: Option, /// The span of the previous token. pub prev_span: Span, - /// The kind of the previous troken. + /// The previous token kind. prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. @@ -242,6 +242,15 @@ impl<'a> Drop for Parser<'a> { } } +// FIXME: Parser uses `self.span` all the time. +// Remove this impl if you think that using `self.token.span` instead is acceptable. +impl Deref for Parser<'_> { + type Target = Token; + fn deref(&self) -> &Self::Target { + &self.token + } +} + #[derive(Clone)] crate struct TokenCursor { crate frame: TokenCursorFrame, @@ -468,8 +477,7 @@ impl<'a> Parser<'a> { ) -> Self { let mut parser = Parser { sess, - token: token::Whitespace, - span: DUMMY_SP, + token: Token { kind: token::Whitespace, span: DUMMY_SP }, prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, @@ -498,9 +506,7 @@ impl<'a> Parser<'a> { subparser_name, }; - let tok = parser.next_tok(); - parser.token = tok.kind; - parser.span = tok.span; + parser.token = parser.next_tok(); if let Some(directory) = directory { parser.directory = directory; @@ -534,7 +540,7 @@ impl<'a> Parser<'a> { } crate fn token_descr(&self) -> Option<&'static str> { - Some(match &self.token { + Some(match &self.token.kind { t if t.is_special_ident() => "reserved identifier", t if t.is_used_keyword() => "keyword", t if t.is_unused_keyword() => "reserved keyword", @@ -612,7 +618,7 @@ impl<'a> Parser<'a> { } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); @@ -732,7 +738,7 @@ impl<'a> Parser<'a> { /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - match self.token { + match self.token.kind { token::BinOp(token::Plus) => { self.bump(); true @@ -763,7 +769,7 @@ impl<'a> Parser<'a> { /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); - match self.token { + match self.token.kind { token::BinOp(token::And) => { self.bump(); Ok(()) @@ -780,7 +786,7 @@ impl<'a> Parser<'a> { /// `|` and continues. If an `|` is not seen, signals an error. fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); - match self.token { + match self.token.kind { token::BinOp(token::Or) => { self.bump(); Ok(()) @@ -805,7 +811,7 @@ impl<'a> Parser<'a> { /// starting token. 
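/// (A leading `<<` is split here rather than rejected: only the first
/// `<` is consumed, and a fresh `<` token starting one byte later is
/// left as the current token.)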
fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); - let ate = match self.token { + let ate = match self.token.kind { token::Lt => { self.bump(); true @@ -845,7 +851,7 @@ impl<'a> Parser<'a> { /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); - let ate = match self.token { + let ate = match self.token.kind { token::Gt => { self.bump(); Some(()) @@ -928,7 +934,7 @@ impl<'a> Parser<'a> { TokenExpectType::NoExpect => self.token == **k, } }) { - match self.token { + match self.token.kind { token::CloseDelim(..) | token::Eof => break, _ => {} }; @@ -1011,7 +1017,7 @@ impl<'a> Parser<'a> { self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. - self.prev_token_kind = match self.token { + self.prev_token_kind = match self.token.kind { token::DocComment(..) => PrevTokenKind::DocComment, token::Comma => PrevTokenKind::Comma, token::BinOp(token::Plus) => PrevTokenKind::Plus, @@ -1022,9 +1028,7 @@ impl<'a> Parser<'a> { _ => PrevTokenKind::Other, }; - let next = self.next_tok(); - self.token = next.kind; - self.span = next.span; + self.token = self.next_tok(); self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); @@ -1038,24 +1042,25 @@ impl<'a> Parser<'a> { // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. self.prev_token_kind = PrevTokenKind::Other; - self.token = next; - self.span = span; + self.token = Token { kind: next, span }; self.expected_tokens.clear(); } pub fn look_ahead(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::TokenKind) -> R, + F: FnOnce(&token::Token) -> R, { if dist == 0 { - return f(&self.token) + // FIXME: Avoid cloning here. + return f(&self.token); } - f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { + let frame = &self.token_cursor.frame; + f(&match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { - TokenTree::Token(token) => token.kind, - TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), - }, - None => token::CloseDelim(self.token_cursor.frame.delim), + TokenTree::Token(token) => token, + TokenTree::Delimited(dspan, delim, _) => Token { kind: token::OpenDelim(delim), span: dspan.open }, + } + None => Token { kind: token::CloseDelim(frame.delim), span: frame.span.close } }) } @@ -1209,7 +1214,7 @@ impl<'a> Parser<'a> { decl, }; - let body = match self.token { + let body = match self.token.kind { token::Semi => { self.bump(); *at_end = true; @@ -1477,7 +1482,7 @@ impl<'a> Parser<'a> { } fn is_named_argument(&self) -> bool { - let offset = match self.token { + let offset = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtPat(..) 
=> return self.look_ahead(1, |t| t == &token::Colon), _ => 0, @@ -1612,7 +1617,7 @@ impl<'a> Parser<'a> { } fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, _) if self.token.is_path_segment_keyword() => { let span = self.span; self.bump(); @@ -1623,7 +1628,7 @@ impl<'a> Parser<'a> { } fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, false) if ident.name == kw::Underscore => { let span = self.span; self.bump(); @@ -1710,7 +1715,7 @@ impl<'a> Parser<'a> { /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` /// attributes. pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { - let meta_ident = match self.token { + let meta_ident = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(meta.path.clone()), @@ -1859,7 +1864,7 @@ impl<'a> Parser<'a> { } fn parse_field_name(&mut self) -> PResult<'a, Ident> { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind { self.expect_no_suffix(self.span, "a tuple index", suffix); self.bump(); Ok(Ident::new(symbol, self.prev_span)) @@ -1949,7 +1954,7 @@ impl<'a> Parser<'a> { } fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { - let delim = match self.token { + let delim = match self.token.kind { token::OpenDelim(delim) => delim, _ => { let msg = "expected open delimiter"; @@ -1993,7 +1998,7 @@ impl<'a> Parser<'a> { let ex: ExprKind; // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr(). - match self.token { + match self.token.kind { token::OpenDelim(token::Paren) => { self.bump(); @@ -2363,7 +2368,7 @@ impl<'a> Parser<'a> { } let mut recovery_field = None; - if let token::Ident(ident, _) = self.token { + if let token::Ident(ident, _) = self.token.kind { if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) { // Use in case of error after field-looking code: `S { foo: () with a }` let mut ident = ident.clone(); @@ -2503,7 +2508,7 @@ impl<'a> Parser<'a> { let segment = self.parse_path_segment(PathStyle::Expr)?; self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren)); - Ok(match self.token { + Ok(match self.token.kind { token::OpenDelim(token::Paren) => { // Method call `expr.f()` let mut args = self.parse_unspanned_seq( @@ -2542,7 +2547,7 @@ impl<'a> Parser<'a> { // expr.f if self.eat(&token::Dot) { - match self.token { + match self.token.kind { token::Ident(..) => { e = self.parse_dot_suffix(e, lo)?; } @@ -2594,7 +2599,7 @@ impl<'a> Parser<'a> { continue; } if self.expr_is_complete(&e) { break; } - match self.token { + match self.token.kind { // expr(...) 
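// A call expression `f(...)`: parse the parenthesized, comma-separated
// argument list and wrap the callee into a call node.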
token::OpenDelim(token::Paren) => { let seq = self.parse_unspanned_seq( @@ -2627,11 +2632,11 @@ impl<'a> Parser<'a> { } crate fn process_potential_macro_variable(&mut self) { - let (token, span) = match self.token { + self.token = match self.token.kind { token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); - let name = match self.token { + let name = match self.token.kind { token::Ident(ident, _) => ident, _ => unreachable!() }; @@ -2646,24 +2651,22 @@ impl<'a> Parser<'a> { // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. match **nt { - token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span), - token::NtLifetime(ident) => (token::Lifetime(ident), ident.span), + token::NtIdent(ident, is_raw) => Token { kind: token::Ident(ident, is_raw), span: ident.span }, + token::NtLifetime(ident) => Token { kind: token::Lifetime(ident), span: ident.span }, _ => return, } } _ => return, }; - self.token = token; - self.span = span; } /// Parses a single token tree from the input. crate fn parse_token_tree(&mut self) -> TokenTree { - match self.token { + match self.token.kind { token::OpenDelim(..) => { let frame = mem::replace(&mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap()); - self.span = frame.span.entire(); + self.token.span = frame.span.entire(); self.bump(); TokenTree::Delimited( frame.span, @@ -2673,9 +2676,9 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); + let token = mem::replace(&mut self.token, Token { kind: token::Whitespace, span: DUMMY_SP }); self.bump(); - TokenTree::token(span, token) + TokenTree::Token(token) } } } @@ -2692,7 +2695,7 @@ impl<'a> Parser<'a> { pub fn parse_tokens(&mut self) -> TokenStream { let mut result = Vec::new(); loop { - match self.token { + match self.token.kind { token::Eof | token::CloseDelim(..) 
=> break, _ => result.push(self.parse_token_tree().into()), } @@ -2707,7 +2710,7 @@ impl<'a> Parser<'a> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() - let (hi, ex) = match self.token { + let (hi, ex) = match self.token.kind { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); @@ -2760,7 +2763,7 @@ impl<'a> Parser<'a> { // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::TokenKind| match *t { + let token_cannot_continue_expr = |t: &token::Token| match t.kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), @@ -3040,7 +3043,7 @@ impl<'a> Parser<'a> { match self.parse_path(PathStyle::Expr) { Ok(path) => { - let (op_noun, op_verb) = match self.token { + let (op_noun, op_verb) = match self.token.kind { token::Lt => ("comparison", "comparing"), token::BinOp(token::Shl) => ("shift", "shifting"), _ => { @@ -3844,14 +3847,14 @@ impl<'a> Parser<'a> { // helper function to decide whether to parse as ident binding or to try to do // something more complex like range patterns fn parse_as_ident(&mut self) -> bool { - self.look_ahead(1, |t| match *t { + self.look_ahead(1, |t| match t.kind { token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false), // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the // range pattern branch token::DotDot => None, _ => Some(true), - }).unwrap_or_else(|| self.look_ahead(2, |t| match *t { + }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind { token::Comma | token::CloseDelim(token::Bracket) => true, _ => false, })) @@ -3914,12 +3917,12 @@ impl<'a> Parser<'a> { let lo = self.span; let pat; - match self.token { + match self.token.kind { token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; let mutbl = self.parse_mutability(); - if let token::Lifetime(ident) = self.token { + if let token::Lifetime(ident) = self.token.kind { let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", ident)); err.span_label(self.span, "unexpected lifetime"); @@ -3990,7 +3993,7 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) 
}; - match self.token { + match self.token.kind { token::Not if qself.is_none() => { // Parse macro invocation self.bump(); @@ -3999,7 +4002,7 @@ impl<'a> Parser<'a> { pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDotEq | token::DotDot => { - let end_kind = match self.token { + let end_kind = match self.token.kind { token::DotDot => RangeEnd::Excluded, token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot), token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq), @@ -4325,7 +4328,7 @@ impl<'a> Parser<'a> { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) -> PResult<'a, Option>> { let token_lo = self.span; - let (ident, def) = match self.token { + let (ident, def) = match self.token.kind { token::Ident(ident, false) if ident.name == kw::Macro => { self.bump(); let ident = self.parse_ident()?; @@ -4436,7 +4439,7 @@ impl<'a> Parser<'a> { } // it's a macro invocation - let id = match self.token { + let id = match self.token.kind { token::OpenDelim(_) => Ident::invalid(), // no special identifier _ => self.parse_ident()?, }; @@ -4444,7 +4447,7 @@ impl<'a> Parser<'a> { // check that we're pointing at delimiters (need to check // again after the `if`, because of `parse_ident` // consuming more tokens). - match self.token { + match self.token.kind { token::OpenDelim(_) => {} _ => { // we only expect an ident if we didn't parse one @@ -4481,7 +4484,7 @@ impl<'a> Parser<'a> { // We used to incorrectly stop parsing macro-expanded statements here. // If the next token will be an error anyway but could have parsed with the // earlier behavior, stop parsing here and emit a warning to avoid breakage. - else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token { + else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token.kind { // These can continue an expression, so we can't stop parsing and warn. token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) | token::BinOp(token::Minus) | token::BinOp(token::Star) | @@ -5250,7 +5253,7 @@ impl<'a> Parser<'a> { assoc_ty_constraints.push(span); } else if self.check_const_arg() { // Parse const argument. - let expr = if let token::OpenDelim(token::Brace) = self.token { + let expr = if let token::OpenDelim(token::Brace) = self.token.kind { self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())? } else if self.token.is_ident() { // FIXME(const_generics): to distinguish between idents for types and consts, @@ -5477,7 +5480,7 @@ impl<'a> Parser<'a> { /// Returns the parsed optional self argument and whether a self shortcut was used. fn parse_self_arg(&mut self) -> PResult<'a, Option> { - let expect_ident = |this: &mut Self| match this.token { + let expect_ident = |this: &mut Self| match this.token.kind { // Preserve hygienic context. token::Ident(ident, _) => { let span = this.span; this.bump(); Ident::new(ident.name, span) } @@ -5492,7 +5495,7 @@ impl<'a> Parser<'a> { // Only a limited set of initial token sequences is considered `self` parameters; anything // else is parsed as a normal function parameter list, so some lookahead is required. 
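// E.g. `&self`, `&'a mut self`, `mut self`, and `self: Box<Self>` are
// all recognized as `self` parameters here, while an ordinary
// identifier such as `selfish: u32` fails the lookahead and falls
// through to normal parameter parsing.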
let eself_lo = self.span; - let (eself, eself_ident, eself_hi) = match self.token { + let (eself, eself_ident, eself_hi) = match self.token.kind { token::BinOp(token::And) => { // `&self` // `&mut self` @@ -5803,7 +5806,7 @@ impl<'a> Parser<'a> { match *vis { VisibilityKind::Inherited => {} _ => { - let is_macro_rules: bool = match self.token { + let is_macro_rules: bool = match self.token.kind { token::Ident(sid, _) => sid.name == sym::macro_rules, _ => false, }; @@ -5918,7 +5921,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Brace))?; let mut trait_items = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) { let mut err = self.diagnostic().struct_span_err_with_code( @@ -6246,7 +6249,7 @@ impl<'a> Parser<'a> { if self.token == token::Comma { seen_comma = true; } - match self.token { + match self.token.kind { token::Comma => { self.bump(); } @@ -7011,7 +7014,7 @@ impl<'a> Parser<'a> { /// Parses a string as an ABI spec on an extern type or module. Consumes /// the `extern` keyword, if one is found. fn parse_opt_abi(&mut self) -> PResult<'a, Option> { - match self.token { + match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) | token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => { let sp = self.span; @@ -7046,7 +7049,7 @@ impl<'a> Parser<'a> { if token.is_keyword(kw::Move) { return true; } - match *token { + match token.kind { token::BinOp(token::Or) | token::OrOr => true, _ => false, } @@ -7818,7 +7821,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { - let ret = match self.token { + let ret = match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => (symbol, ast::StrStyle::Cooked, suffix), token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) => diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 83c4c809de372..b015815ac9c1e 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -260,7 +260,7 @@ fn parse_inline_asm<'a>( loop { // MOD_SEP is a double colon '::' without space in between. // When encountered, the state must be advanced twice. - match (&p.token, state.next(), state.next().next()) { + match (&p.token.kind, state.next(), state.next().next()) { (&token::Colon, StateNone, _) | (&token::ModSep, _, StateNone) => { p.bump(); diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index 8a297a5c9bc19..e5e422c4d9c77 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -103,7 +103,7 @@ fn parse_assert<'a>( // // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be // turned into an error. - let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token { + let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. 
}) = parser.token.kind { let mut err = cx.struct_span_warn(parser.span, "unexpected string literal"); let comma_span = cx.source_map().next_point(parser.prev_span); err.span_suggestion_short( diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index b5be45547cfbe..0eaac544e332a 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -149,7 +149,7 @@ fn parse_args<'a>( } // accept trailing commas if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) { named = true; - let ident = if let token::Ident(i, _) = p.token { + let ident = if let token::Ident(i, _) = p.token.kind { p.bump(); i } else { From 5e693531ffa55cfb0cececdf5d7203a6d400e828 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 09:39:34 +0300 Subject: [PATCH 07/14] syntax: Add some helper methods to `Token` --- src/libsyntax/attr/mod.rs | 4 +- src/libsyntax/ext/tt/macro_parser.rs | 13 +++--- src/libsyntax/ext/tt/quoted.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 61 ++++++++++--------------- src/libsyntax/parse/lexer/tokentrees.rs | 4 +- src/libsyntax/parse/literal.rs | 2 +- src/libsyntax/parse/parser.rs | 20 ++++---- src/libsyntax/parse/token.rs | 18 +++++++- src/libsyntax/tokenstream.rs | 2 +- 9 files changed, 64 insertions(+), 62 deletions(-) diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 448061395afdc..56afc8728b4ca 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -604,8 +604,8 @@ impl NestedMetaItem { fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, { - if let Some(TokenTree::Token(token)) = tokens.peek().cloned() { - if let Ok(lit) = Lit::from_token(&token, token.span) { + if let Some(TokenTree::Token(token)) = tokens.peek() { + if let Ok(lit) = Lit::from_token(token, token.span) { tokens.next(); return Some(NestedMetaItem::Literal(lit)); } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 4f681a77ed350..7127acabb44e0 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -727,13 +727,12 @@ pub fn parse( "ambiguity: multiple successful parses".to_string(), ); } else { - let span = if parser.span.is_dummy() { - parser.span - } else { - sess.source_map().next_point(parser.span) - }; return Failure( - Token { kind: token::Eof, span }, + Token::new(token::Eof, if parser.span.is_dummy() { + parser.span + } else { + sess.source_map().next_point(parser.span) + }), "missing tokens in macro arguments", ); } @@ -771,7 +770,7 @@ pub fn parse( // then there is a syntax error. else if bb_items.is_empty() && next_items.is_empty() { return Failure( - parser.token.clone(), + parser.token.take(), "no rules expected this token in macro call", ); } diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 9f4e35ad3d779..558b07af6110c 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -154,7 +154,7 @@ impl TokenTree { } crate fn token(span: Span, kind: TokenKind) -> TokenTree { - TokenTree::Token(Token { kind, span }) + TokenTree::Token(Token::new(kind, span)) } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 9dba5ff3e8c84..47428c9a14ce7 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -88,7 +88,7 @@ impl<'a> StringReader<'a> { /// Returns the next token. EFFECT: advances the string_reader. 
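/// (`peek_token` is the lexer's one-token lookahead; `take` below moves
/// it out and leaves a dummy token behind, so returning the peeked
/// token no longer requires cloning it.)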
pub fn try_next_token(&mut self) -> Result { assert!(self.fatal_errs.is_empty()); - let ret_val = self.peek_token.clone(); + let ret_val = self.peek_token.take(); self.advance_token()?; Ok(ret_val) } @@ -205,8 +205,7 @@ impl<'a> StringReader<'a> { ch: Some('\n'), source_file, end_src_index: src.len(), - // dummy values; not read - peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, + peek_token: Token::dummy(), peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), @@ -320,21 +319,15 @@ impl<'a> StringReader<'a> { self.peek_token = comment; } None => { - if self.is_eof() { - - let (real, raw) = self.mk_sp_and_raw( - self.source_file.end_pos, - self.source_file.end_pos, - ); - self.peek_token = Token { kind: token::Eof, span: real }; - self.peek_span_src_raw = raw; + let (kind, start_pos, end_pos) = if self.is_eof() { + (token::Eof, self.source_file.end_pos, self.source_file.end_pos) } else { - let start_bytepos = self.pos; - let kind = self.next_token_inner()?; - let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos); - self.peek_token = Token { kind, span: real }; - self.peek_span_src_raw = raw; + let start_pos = self.pos; + (self.next_token_inner()?, start_pos, self.pos) }; + let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos); + self.peek_token = Token::new(kind, real); + self.peek_span_src_raw = raw; } } @@ -544,7 +537,7 @@ impl<'a> StringReader<'a> { } else { token::Comment }; - Some(Token { kind, span: self.mk_sp(start_bpos, self.pos) }) + Some(Token::new(kind, self.mk_sp(start_bpos, self.pos))) } Some('*') => { self.bump(); @@ -568,10 +561,10 @@ impl<'a> StringReader<'a> { while !self.ch_is('\n') && !self.is_eof() { self.bump(); } - return Some(Token { - kind: token::Shebang(self.name_from(start)), - span: self.mk_sp(start, self.pos), - }); + return Some(Token::new( + token::Shebang(self.name_from(start)), + self.mk_sp(start, self.pos), + )); } } None @@ -596,10 +589,7 @@ impl<'a> StringReader<'a> { while is_pattern_whitespace(self.ch) { self.bump(); } - let c = Some(Token { - kind: token::Whitespace, - span: self.mk_sp(start_bpos, self.pos), - }); + let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos))); debug!("scanning whitespace: {:?}", c); c } @@ -658,10 +648,7 @@ impl<'a> StringReader<'a> { token::Comment }; - Some(Token { - kind, - span: self.mk_sp(start_bpos, self.pos), - }) + Some(Token::new(kind, self.mk_sp(start_bpos, self.pos))) }) } @@ -1588,10 +1575,10 @@ mod tests { assert_eq!(string_reader.next_token(), token::Comment); assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); - let tok2 = Token { - kind: token::Ident(id, false), - span: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), - }; + let tok2 = Token::new( + token::Ident(id, false), + Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + ); assert_eq!(tok1.kind, tok2.kind); assert_eq!(tok1.span, tok2.span); assert_eq!(string_reader.next_token(), token::Whitespace); @@ -1599,10 +1586,10 @@ mod tests { assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); - let tok4 = Token { - kind: mk_ident("main"), - span: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), - }; + let tok4 = Token::new( + mk_ident("main"), + Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + ); assert_eq!(tok3.kind, tok4.kind); assert_eq!(tok3.span, tok4.span); // the lparen is already read: diff --git a/src/libsyntax/parse/lexer/tokentrees.rs 
b/src/libsyntax/parse/lexer/tokentrees.rs index 0dab441c96f17..b809f99beba33 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -10,7 +10,7 @@ impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { let mut tt_reader = TokenTreesReader { string_reader: self, - token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP }, + token: Token::dummy(), open_braces: Vec::new(), unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), @@ -202,7 +202,7 @@ impl<'a> TokenTreesReader<'a> { Err(err) }, _ => { - let tt = TokenTree::Token(self.token.clone()); + let tt = TokenTree::Token(self.token.take()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 1abb8254bc6a1..978fd205ea489 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -277,7 +277,7 @@ impl<'a> Parser<'a> { if self.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token { kind, span: self.span.to(next_span) }); + return Some(Token::new(kind, self.span.to(next_span))); } } None diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index cc67a3fbd6632..7dd92f022e1f8 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -318,7 +318,7 @@ impl TokenCursor { self.frame = frame; continue } else { - return Token { kind: token::Eof, span: DUMMY_SP } + return Token::new(token::Eof, DUMMY_SP); }; match self.frame.last_token { @@ -477,7 +477,7 @@ impl<'a> Parser<'a> { ) -> Self { let mut parser = Parser { sess, - token: Token { kind: token::Whitespace, span: DUMMY_SP }, + token: Token::dummy(), prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, @@ -1042,12 +1042,12 @@ impl<'a> Parser<'a> { // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. self.prev_token_kind = PrevTokenKind::Other; - self.token = Token { kind: next, span }; + self.token = Token::new(next, span); self.expected_tokens.clear(); } pub fn look_ahead(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::Token) -> R, + F: FnOnce(&Token) -> R, { if dist == 0 { // FIXME: Avoid cloning here. @@ -1058,9 +1058,9 @@ impl<'a> Parser<'a> { f(&match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { TokenTree::Token(token) => token, - TokenTree::Delimited(dspan, delim, _) => Token { kind: token::OpenDelim(delim), span: dspan.open }, + TokenTree::Delimited(dspan, delim, _) => Token::new(token::OpenDelim(delim), dspan.open), } - None => Token { kind: token::CloseDelim(frame.delim), span: frame.span.close } + None => Token::new(token::CloseDelim(frame.delim), frame.span.close) }) } @@ -2651,8 +2651,8 @@ impl<'a> Parser<'a> { // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. 
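// (That is, an interpolated token carrying `NtIdent` is re-emitted as a
// plain `Ident` token at the identifier's own span, and `NtLifetime`
// likewise becomes a plain `Lifetime` token; every other interpolated
// nonterminal is returned unchanged.)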
match **nt { - token::NtIdent(ident, is_raw) => Token { kind: token::Ident(ident, is_raw), span: ident.span }, - token::NtLifetime(ident) => Token { kind: token::Lifetime(ident), span: ident.span }, + token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span), + token::NtLifetime(ident) => Token::new(token::Lifetime(ident), ident.span), _ => return, } } @@ -2676,7 +2676,7 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let token = mem::replace(&mut self.token, Token { kind: token::Whitespace, span: DUMMY_SP }); + let token = self.token.take(); self.bump(); TokenTree::Token(token) } @@ -2763,7 +2763,7 @@ impl<'a> Parser<'a> { // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::Token| match t.kind { + let token_cannot_continue_expr = |t: &Token| match t.kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index a06bf9fae7c29..559e0524a4bf1 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -13,7 +13,7 @@ use crate::syntax::parse::parse_stream_from_source_str; use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree}; use syntax_pos::symbol::{self, Symbol}; -use syntax_pos::{self, Span, FileName}; +use syntax_pos::{self, Span, FileName, DUMMY_SP}; use log::info; use std::fmt; @@ -609,6 +609,22 @@ impl TokenKind { } } +impl Token { + crate fn new(kind: TokenKind, span: Span) -> Self { + Token { kind, span } + } + + /// Some token that will be thrown away later. + crate fn dummy() -> Self { + Token::new(TokenKind::Whitespace, DUMMY_SP) + } + + /// Return this token by value and leave a dummy token in its place. + crate fn take(&mut self) -> Self { + mem::replace(self, Token::dummy()) + } +} + impl PartialEq for Token { fn eq(&self, rhs: &TokenKind) -> bool { self.kind == *rhs diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index e6fe33d6ccf26..140b77b6b5f91 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -139,7 +139,7 @@ impl TokenTree { } pub fn token(span: Span, kind: TokenKind) -> TokenTree { - TokenTree::Token(Token { kind, span }) + TokenTree::Token(Token::new(kind, span)) } /// Returns the opening delimiter as a token tree. 
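To make the new helpers concrete, here is a minimal, self-contained sketch of
the `new`/`dummy`/`take` pattern this patch introduces. The `TokenKind`
variants and the tuple `Span` are simplified stand-ins for illustration, not
rustc's actual definitions:

    use std::mem;

    #[derive(Clone, Debug, PartialEq)]
    enum TokenKind { Whitespace, Ident(String) }

    // Stand-in for syntax_pos::Span.
    type Span = (u32, u32);
    const DUMMY_SP: Span = (0, 0);

    #[derive(Clone, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    impl Token {
        fn new(kind: TokenKind, span: Span) -> Self {
            Token { kind, span }
        }

        // Some token that will be thrown away later.
        fn dummy() -> Self {
            Token::new(TokenKind::Whitespace, DUMMY_SP)
        }

        // Return this token by value and leave a dummy in its place.
        fn take(&mut self) -> Self {
            mem::replace(self, Token::dummy())
        }
    }

    fn main() {
        let mut peek = Token::new(TokenKind::Ident("fn".into()), (21, 23));
        // Moving the token out avoids cloning it just to hand it onward;
        // in the real compiler a clone could mean duplicating an
        // `Interpolated` payload's reference count for no benefit.
        let ret = peek.take();
        assert_eq!(ret.kind, TokenKind::Ident("fn".into()));
        assert_eq!(peek, Token::dummy());
    }

The same pattern shows up below in `try_next_token`, `into_token_trees`, and
`parse_token_tree`, each of which previously either cloned the current token
or open-coded the `mem::replace`.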
From 4c5d773b4d529c6263f682513ea34ce644a8179b Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 11:00:22 +0300 Subject: [PATCH 08/14] syntax: Remove duplicate span from `token::Lifetime` --- src/librustc/ich/impls_syntax.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 4 +-- src/libsyntax/mut_visit.rs | 1 - src/libsyntax/parse/lexer/mod.rs | 14 +++------ src/libsyntax/parse/parser.rs | 7 ++--- src/libsyntax/parse/token.rs | 43 ++++++++++++++++---------- src/libsyntax_ext/proc_macro_server.rs | 4 +-- 7 files changed, 39 insertions(+), 36 deletions(-) diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index a373f434bf71e..20d308e5fe87b 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -357,7 +357,7 @@ impl<'a> HashStable> for token::TokenKind { ident.name.hash_stable(hcx, hasher); is_raw.hash_stable(hcx, hasher); } - token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), + token::Lifetime(name) => name.hash_stable(hcx, hasher), token::Interpolated(_) => { bug!("interpolated tokens should not be present in the HIR") diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 7127acabb44e0..f93b548c50106 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -431,8 +431,8 @@ pub fn parse_failure_msg(tok: TokenKind) -> String { fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool { if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { id1.name == id2.name && is_raw1 == is_raw2 - } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) { - id1.name == id2.name + } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) { + name1 == name2 } else { *t1 == *t2 } diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index ad6d3f71c652e..3bb36605299fd 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -599,7 +599,6 @@ pub fn noop_visit_tts(TokenStream(tts): &mut TokenStream, vis: &m pub fn noop_visit_token(t: &mut TokenKind, vis: &mut T) { match t { token::Ident(id, _is_raw) => vis.visit_ident(id), - token::Lifetime(id) => vis.visit_ident(id), token::Interpolated(nt) => { let mut nt = Lrc::make_mut(nt); vis.visit_interpolated(&mut nt); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 47428c9a14ce7..da8c6f5ac2204 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1041,13 +1041,6 @@ impl<'a> StringReader<'a> { return Ok(TokenKind::lit(token::Char, symbol, None)); } - // Include the leading `'` in the real identifier, for macro - // expansion purposes. See #12512 for the gory details of why - // this is necessary. - let ident = self.with_str_from(start_with_quote, |lifetime_name| { - self.mk_ident(lifetime_name) - }); - if starts_with_number { // this is a recovered lifetime written `'1`, error but accept it self.err_span_( @@ -1057,7 +1050,10 @@ impl<'a> StringReader<'a> { ); } - return Ok(token::Lifetime(ident)); + // Include the leading `'` in the real identifier, for macro + // expansion purposes. See #12512 for the gory details of why + // this is necessary. 
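// (With the span removed from `token::Lifetime`, the lexer now interns
// the lifetime's text, leading `'` included, directly as a `Name`; the
// span lives only on the enclosing `Token`.)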
+ return Ok(token::Lifetime(self.name_from(start_with_quote))); } let msg = "unterminated character literal"; let symbol = self.scan_single_quoted_string(start_with_quote, msg); @@ -1690,7 +1686,7 @@ mod tests { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(), - token::Lifetime(Ident::from_str("'abc"))); + token::Lifetime(Symbol::intern("'abc"))); }) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7dd92f022e1f8..362f81d02a043 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2652,7 +2652,7 @@ impl<'a> Parser<'a> { // and lifetime tokens, so the former are never encountered during normal parsing. match **nt { token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span), - token::NtLifetime(ident) => Token::new(token::Lifetime(ident), ident.span), + token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span), _ => return, } } @@ -3922,9 +3922,8 @@ impl<'a> Parser<'a> { // Parse &pat / &mut pat self.expect_and()?; let mutbl = self.parse_mutability(); - if let token::Lifetime(ident) = self.token.kind { - let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", - ident)); + if let token::Lifetime(name) = self.token.kind { + let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name)); err.span_label(self.span, "unexpected lifetime"); return Err(err); } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 559e0524a4bf1..81c93a4179e36 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -12,7 +12,7 @@ use crate::symbol::kw; use crate::syntax::parse::parse_stream_from_source_str; use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree}; -use syntax_pos::symbol::{self, Symbol}; +use syntax_pos::symbol::Symbol; use syntax_pos::{self, Span, FileName, DUMMY_SP}; use log::info; @@ -211,7 +211,7 @@ pub enum TokenKind { /* Name components */ Ident(ast::Ident, /* is_raw */ bool), - Lifetime(ast::Ident), + Lifetime(ast::Name), Interpolated(Lrc), @@ -364,7 +364,23 @@ impl TokenKind { _ => false, } } +} + +impl Token { + /// Returns a lifetime identifier if this token is a lifetime. + pub fn lifetime(&self) -> Option { + match self.kind { + Lifetime(name) => Some(ast::Ident::new(name, self.span)), + Interpolated(ref nt) => match **nt { + NtLifetime(ident) => Some(ident), + _ => None, + }, + _ => None, + } + } +} +impl TokenKind { /// Returns an identifier if this token is an identifier. pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> { match *self { @@ -376,12 +392,12 @@ impl TokenKind { _ => None, } } - /// Returns a lifetime identifier if this token is a lifetime. - pub fn lifetime(&self) -> Option { + /// Returns a lifetime name if this token is a lifetime. + pub fn lifetime_name(&self) -> Option { match *self { - Lifetime(ident) => Some(ident), + Lifetime(name) => Some(name), Interpolated(ref nt) => match **nt { - NtLifetime(ident) => Some(ident), + NtLifetime(ident) => Some(ident.name), _ => None, }, _ => None, @@ -393,7 +409,7 @@ impl TokenKind { } /// Returns `true` if the token is a lifetime. 
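/// (This goes through `lifetime_name`, so an interpolated `NtLifetime`
/// token also counts as a lifetime.)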
crate fn is_lifetime(&self) -> bool { - self.lifetime().is_some() + self.lifetime_name().is_some() } /// Returns `true` if the token is a identifier whose name is the given @@ -521,13 +537,7 @@ impl TokenKind { _ => return None, }, SingleQuote => match joint { - Ident(ident, false) => { - let name = Symbol::intern(&format!("'{}", ident)); - Lifetime(symbol::Ident { - name, - span: ident.span, - }) - } + Ident(ident, false) => Lifetime(Symbol::intern(&format!("'{}", ident))), _ => return None, }, @@ -597,7 +607,7 @@ impl TokenKind { (&Literal(a), &Literal(b)) => a == b, - (&Lifetime(a), &Lifetime(b)) => a.name == b.name, + (&Lifetime(a), &Lifetime(b)) => a == b, (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name || a.name == kw::DollarCrate || c.name == kw::DollarCrate), @@ -732,8 +742,7 @@ impl Nonterminal { Some(TokenTree::token(ident.span, token).into()) } Nonterminal::NtLifetime(ident) => { - let token = Lifetime(ident); - Some(TokenTree::token(ident.span, token).into()) + Some(TokenTree::token(ident.span, Lifetime(ident.name)).into()) } Nonterminal::NtTT(ref tt) => { Some(tt.clone().into()) diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 26eb9e9d4fc1f..6ab613d2abd71 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -145,8 +145,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> Ident(ident, false) if ident.name == kw::DollarCrate => tt!(Ident::dollar_crate()), Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)), - Lifetime(ident) => { - let ident = ident.without_first_quote(); + Lifetime(name) => { + let ident = ast::Ident::new(name, span).without_first_quote(); stack.push(tt!(Ident::new(ident.name, false))); tt!(Punct::new('\'', true)) } From f745e5f9b676be02cc1dfbab0bfb338dc72b4dd3 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 11:56:06 +0300 Subject: [PATCH 09/14] syntax: Remove duplicate span from `token::Ident` --- src/librustc/ich/impls_syntax.rs | 4 +- src/librustdoc/html/highlight.rs | 4 +- src/libsyntax/attr/mod.rs | 10 +-- src/libsyntax/diagnostics/plugin.rs | 18 ++--- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 39 +++++------ src/libsyntax/ext/tt/macro_rules.rs | 12 ++-- src/libsyntax/ext/tt/quoted.rs | 5 +- src/libsyntax/mut_visit.rs | 1 - src/libsyntax/parse/diagnostics.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 22 ++---- src/libsyntax/parse/literal.rs | 8 +-- src/libsyntax/parse/mod.rs | 31 ++++----- src/libsyntax/parse/parser.rs | 48 ++++++------- src/libsyntax/parse/token.rs | 94 ++++++++++++++++---------- src/libsyntax/tokenstream.rs | 4 +- src/libsyntax_ext/concat_idents.rs | 4 +- src/libsyntax_ext/format.rs | 6 +- src/libsyntax_ext/proc_macro_decls.rs | 4 +- src/libsyntax_ext/proc_macro_server.rs | 13 ++-- src/libsyntax_pos/symbol.rs | 30 ++++---- 21 files changed, 181 insertions(+), 184 deletions(-) diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 20d308e5fe87b..abe4196abd19c 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -353,8 +353,8 @@ impl<'a> HashStable> for token::TokenKind { } token::Literal(lit) => lit.hash_stable(hcx, hasher), - token::Ident(ident, is_raw) => { - ident.name.hash_stable(hcx, hasher); + token::Ident(name, is_raw) => { + name.hash_stable(hcx, hasher); is_raw.hash_stable(hcx, hasher); } token::Lifetime(name) => name.hash_stable(hcx, hasher), diff --git 
a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index bc6eaaaa8b9f0..281bd72deeb80 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -325,8 +325,8 @@ impl<'a> Classifier<'a> { } // Keywords are also included in the identifier set. - token::Ident(ident, is_raw) => { - match ident.name { + token::Ident(name, is_raw) => { + match name { kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord, kw::SelfLower | kw::SelfUpper => Class::Self_, diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 56afc8728b4ca..39ffabaa4a948 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -482,19 +482,19 @@ impl MetaItem { let path = match tokens.next() { Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) | Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: { - let mut segments = if let token::Ident(ident, _) = kind { + let mut segments = if let token::Ident(name, _) = kind { if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() { tokens.next(); - vec![PathSegment::from_ident(ident.with_span_pos(span))] + vec![PathSegment::from_ident(Ident::new(name, span))] } else { - break 'arm Path::from_ident(ident.with_span_pos(span)); + break 'arm Path::from_ident(Ident::new(name, span)); } } else { vec![PathSegment::path_root(span)] }; loop { - if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() { - segments.push(PathSegment::from_ident(ident.with_span_pos(span))); + if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() { + segments.push(PathSegment::from_ident(Ident::new(name, span))); } else { return None; } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index b342e4bc47274..8d9848d98fb21 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -39,7 +39,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>, }; ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| { - match diagnostics.get_mut(&code.name) { + match diagnostics.get_mut(&code) { // Previously used errors. Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => { ecx.struct_span_warn(span, &format!( @@ -72,10 +72,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => { + (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => { (code, None) }, - (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), + (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), Some(&TokenTree::Token(Token { kind: token::Comma, .. })), Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. 
}))) => { (code, Some(symbol)) @@ -112,7 +112,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>, description, use_site: None }; - if diagnostics.insert(code.name, info).is_some() { + if diagnostics.insert(code, info).is_some() { ecx.span_err(span, &format!( "diagnostic code {} already registered", code )); @@ -140,13 +140,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree: &[TokenTree]) -> Box { assert_eq!(token_tree.len(), 3); - let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { + let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }), + &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }), // DIAGNOSTICS ident. - &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. }) - ) => (*&crate_name, name), + &TokenTree::Token(Token { kind: token::Ident(name, _), span }) + ) => (crate_name, Ident::new(name, span)), _ => unreachable!() }; @@ -209,7 +209,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, MacEager::items(smallvec![ P(ast::Item { - ident: *name, + ident, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Const( diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 0c2ab67240741..3b24837e36598 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -269,7 +269,7 @@ impl TTMacroExpander for F if let token::Interpolated(nt) = &token.kind { if let token::NtIdent(ident, is_raw) = **nt { *tt = tokenstream::TokenTree::token(ident.span, - token::Ident(ident, is_raw)); + token::Ident(ident.name, is_raw)); } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index f93b548c50106..82cc9e8ac2280 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -74,7 +74,7 @@ pub use NamedMatch::*; pub use ParseResult::*; use TokenTreeOrTokenTreeSlice::*; -use crate::ast::Ident; +use crate::ast::{Ident, Name}; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; @@ -429,8 +429,8 @@ pub fn parse_failure_msg(tok: TokenKind) -> String { /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool { - if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { - id1.name == id2.name && is_raw1 == is_raw2 + if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) { + name1 == name2 && is_raw1 == is_raw2 } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) { name1 == name2 } else { @@ -466,8 +466,7 @@ fn inner_parse_loop<'root, 'tt>( next_items: &mut Vec>, eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, - token: &TokenKind, - span: syntax_pos::Span, + token: &Token, ) -> ParseResult<()> { // Pop items from `cur_items` until it is empty. 
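// (Each `item` is one live position of the macro matcher. The loop
// below advances every position by the single `token` the caller is
// looking at: positions expecting exactly this token go into
// `next_items`, positions whose next element is a nonterminal that the
// real parser must handle go into `bb_items`, and positions that have
// matched their whole matcher are collected so the caller can detect
// success at end of input.)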
while let Some(mut item) = cur_items.pop() { @@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>( // Add matches from this repetition to the `matches` of `up` for idx in item.match_lo..item.match_hi { let sub = item.matches[idx].clone(); - let span = DelimSpan::from_pair(item.sp_open, span); + let span = DelimSpan::from_pair(item.sp_open, token.span); new_pos.push_match(idx, MatchedSeq(sub, span)); } @@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>( TokenTree::MetaVarDecl(_, _, id) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. - if may_begin_with(id.name, token) { + if may_begin_with(token, id.name) { bb_items.push(item); } } @@ -698,7 +697,6 @@ pub fn parse( &mut eof_items, &mut bb_items, &parser.token, - parser.span, ) { Success(_) => {} Failure(token, msg) => return Failure(token, msg), @@ -806,10 +804,9 @@ pub fn parse( /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. -fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> { +fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> { match *token { - token::Ident(ident, is_raw) if ident.name != kw::Underscore => - Some((ident, is_raw)), + token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)), _ => None, } } @@ -818,7 +815,7 @@ fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> { /// /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that /// token. Be conservative (return true) if not sure. -fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { +fn may_begin_with(token: &Token, name: Name) -> bool { /// Checks whether the non-terminal may contain a single (non-keyword) identifier. fn may_be_ident(nt: &token::Nonterminal) -> bool { match *nt { @@ -830,14 +827,14 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { match name { sym::expr => token.can_begin_expr(), sym::ty => token.can_begin_type(), - sym::ident => get_macro_ident(token).is_some(), + sym::ident => get_macro_name(token).is_some(), sym::literal => token.can_begin_literal_or_bool(), - sym::vis => match *token { + sym::vis => match token.kind { // The follow-set of :vis + "priv" keyword + interpolated token::Comma | token::Ident(..) | token::Interpolated(_) => true, _ => token.can_begin_type(), }, - sym::block => match *token { + sym::block => match token.kind { token::OpenDelim(token::Brace) => true, token::Interpolated(ref nt) => match **nt { token::NtItem(_) @@ -851,7 +848,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { }, _ => false, }, - sym::path | sym::meta => match *token { + sym::path | sym::meta => match token.kind { token::ModSep | token::Ident(..) => true, token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, @@ -859,7 +856,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { }, _ => false, }, - sym::pat => match *token { + sym::pat => match token.kind { token::Ident(..) 
| // box, ref, mut, and other identifiers (can stricten) token::OpenDelim(token::Paren) | // tuple pattern token::OpenDelim(token::Bracket) | // slice pattern @@ -875,7 +872,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, - sym::lifetime => match *token { + sym::lifetime => match token.kind { token::Lifetime(_) => true, token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, @@ -883,7 +880,7 @@ fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { }, _ => false, }, - _ => match *token { + _ => match token.kind { token::CloseDelim(_) => false, _ => true, }, @@ -929,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal { sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())), sym::ty => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one - sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) { + sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) { let span = p.span; p.bump(); - token::NtIdent(Ident::new(ident.name, span), is_raw) + token::NtIdent(Ident::new(name, span), is_raw) } else { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", &token_str)).emit(); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 05e921b1bfd1a..77f53c35b0b5e 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -1046,8 +1046,7 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::If || - i.name == kw::In => IsInFollow::Yes, + Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, _ => IsInFollow::No(tokens), @@ -1064,8 +1063,8 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> IsInFollow { OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::As || - i.name == kw::Where => IsInFollow::Yes, + Ident(name, false) if name == kw::As || + name == kw::Where => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => @@ -1092,9 +1091,8 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(i, is_raw) if is_raw || i.name != kw::Priv => - IsInFollow::Yes, - ref tok => if tok.can_begin_type() { + Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, + _ => if token.can_begin_type() { IsInFollow::Yes } else { IsInFollow::No(tokens) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 558b07af6110c..582d87b911dbc 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -323,10 +323,9 @@ where // metavariable that names the crate of the invocation. 
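// (Concretely: `$crate` in the macro definition becomes an identifier
// token for the reserved `kw::DollarCrate` name, while any other
// `$ident` becomes a `TokenTree::MetaVar` naming a matcher binding.)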
             Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                 let (ident, is_raw) = token.ident().unwrap();
-                let span = token.span.with_lo(span.lo());
+                let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 3bb36605299fd..7eb88de2281da 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -598,7 +598,6 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
 // apply ident visitor if it's an ident, apply other visits to interpolated nodes
 pub fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
     match t {
-        token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 1759a229cf49d..7830b2ce880c2 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -201,12 +201,12 @@ impl<'a> Parser<'a> {
             self.span,
             &format!("expected identifier, found {}", self.this_token_descr()),
         );
-        if let token::Ident(ident, false) = &self.token.kind {
-            if ident.is_raw_guess() {
+        if let token::Ident(name, false) = self.token.kind {
+            if Ident::new(name, self.span).is_raw_guess() {
                 err.span_suggestion(
                     self.span,
                     "you can escape reserved keywords to use them as identifiers",
-                    format!("r#{}", ident),
+                    format!("r#{}", name),
                     Applicability::MaybeIncorrect,
                 );
             }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index da8c6f5ac2204..e3d959c2c54c4 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1,4 +1,4 @@
-use crate::ast::{self, Ident};
+use crate::ast;
 use crate::parse::ParseSess;
 use crate::parse::token::{self, Token, TokenKind};
 use crate::symbol::{sym, Symbol};
@@ -61,15 +61,6 @@ impl<'a> StringReader<'a> {
         (real, raw)
     }

-    fn mk_ident(&self, string: &str) -> Ident {
-        let mut ident = Ident::from_str(string);
-        if let Some(span) = self.override_span {
-            ident.span = span;
-        }
-
-        ident
-    }
-
     fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
         match res {
             Ok(tok) => tok,
@@ -858,17 +849,17 @@ impl<'a> StringReader<'a> {
             return Ok(self.with_str_from(start, |string| {
                 // FIXME: perform NFKC normalization here. (Issue #2253)
-                let ident = self.mk_ident(string);
+                let name = ast::Name::intern(string);

                 if is_raw_ident {
                     let span = self.mk_sp(raw_start, self.pos);
-                    if !ident.can_be_raw() {
-                        self.err_span(span, &format!("`{}` cannot be a raw identifier", ident));
+                    if !name.can_be_raw() {
+                        self.err_span(span, &format!("`{}` cannot be a raw identifier", name));
                     }
                     self.sess.raw_identifier_spans.borrow_mut().push(span);
                 }

-                token::Ident(ident, is_raw_ident)
+                token::Ident(name, is_raw_ident)
             }));
         }
     }
@@ -1567,12 +1558,11 @@ mod tests {
                            &sh,
                            "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                .to_string());
-            let id = Ident::from_str("fn");
             assert_eq!(string_reader.next_token(), token::Comment);
             assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token::new(
-                token::Ident(id, false),
+                token::Ident(Symbol::intern("fn"), false),
                 Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
             );
             assert_eq!(tok1.kind, tok2.kind);
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 978fd205ea489..7b27304071c7f 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -1,6 +1,6 @@
 //! Code related to parsing literals.

-use crate::ast::{self, Ident, Lit, LitKind};
+use crate::ast::{self, Lit, LitKind};
 use crate::parse::parser::Parser;
 use crate::parse::PResult;
 use crate::parse::token::{self, Token, TokenKind};
@@ -230,8 +230,8 @@ impl Lit {
     /// Converts arbitrary token into an AST literal.
     crate fn from_token(token: &TokenKind, span: Span) -> Result<Lit, LitError> {
         let lit = match *token {
-            token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
-                token::Lit::new(token::Bool, ident.name, None),
+            token::Ident(name, false) if name == kw::True || name == kw::False =>
+                token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
                 lit,
             token::Interpolated(ref nt) => {
@@ -258,7 +258,7 @@ impl Lit {
     /// Losslessly convert an AST literal into a token stream.
     crate fn tokens(&self) -> TokenStream {
         let token = match self.token.kind {
-            token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+            token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
         TokenTree::token(self.span, token).into()
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 5187621258d0d..2b82767d7e91d 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -382,11 +382,12 @@ impl SeqSep {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::{self, Ident, PatKind};
+    use crate::ast::{self, Name, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
     use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
+    use crate::symbol::{kw, sym};
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
     use crate::util::parser_testing::{string_to_expr, string_to_item};
@@ -418,8 +419,6 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let tts: Vec<_> =
                 string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
             let tts: &[TokenTree] = &tts[..];
@@ -432,8 +431,7 @@ mod tests {
                 Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
             )
-            if name_macro_rules.name == sym::macro_rules
-            && name_zip.name.as_str() == "zip" => {
+            if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
                 let tts = &macro_tts.trees().collect::<Vec<_>>();
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
@@ -448,9 +446,9 @@ mod tests {
                     (
                         2,
                         Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                        Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                        Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                     )
-                    if first_delim == token::Paren && ident.name.as_str() == "a" => {},
+                    if first_delim == token::Paren && name.as_str() == "a" => {},
                     _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                 }
                 let tts = &second_tts.trees().collect::<Vec<_>>();
@@ -458,9 +456,9 @@ mod tests {
                     (
                         2,
                         Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                        Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
+                        Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
                     )
-                    if second_delim == token::Paren && ident.name.as_str() == "a" => {},
+                    if second_delim == token::Paren && name.as_str() == "a" => {},
                     _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                 }
             },
@@ -478,25 +476,22 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
+                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13),
-                                         token::Ident(Ident::from_str("i32"), false)).into(),
+                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18),
-                                         token::Ident(Ident::from_str("b"), false)).into(),
+                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
                         TokenTree::token(sp(18, 19), token::Semi).into(),
                     ]).into(),
                 ).into()
@@ -604,8 +599,6 @@ mod tests {
     #[test]
     fn crlf_doc_comments() {
         with_default_globals(|| {
-            use crate::symbol::sym;
-
             let sess = ParseSess::new(FilePathMapping::empty());

             let name_1 = FileName::Custom("crlf_source_1".to_string());
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 362f81d02a043..57a49d1524d81 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -362,7 +362,7 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Ident(sym::doc, false)),
                 TokenTree::token(sp, token::Eq),
                 TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
@@ -541,9 +541,9 @@ impl<'a> Parser<'a> {

     crate fn token_descr(&self) -> Option<&'static str> {
         Some(match &self.token.kind {
-            t if t.is_special_ident() => "reserved identifier",
-            t if t.is_used_keyword() => "keyword",
-            t if t.is_unused_keyword() => "reserved keyword",
+            _ if self.token.is_special_ident() => "reserved identifier",
+            _ if self.token.is_used_keyword() => "keyword",
+            _ if self.token.is_unused_keyword() => "reserved keyword",
             token::DocComment(..) => "doc comment",
             _ => return None,
         })
@@ -619,7 +619,7 @@ impl<'a> Parser<'a> {
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) => {
+            token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
                     if recover {
@@ -630,7 +630,7 @@ impl<'a> Parser<'a> {
                 }
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => {
                 Err(if self.prev_token_kind == PrevTokenKind::DocComment {
@@ -1618,10 +1618,10 @@ impl<'a> Parser<'a> {
     fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+            token::Ident(name, _) if name.is_path_segment_keyword() => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -1629,10 +1629,10 @@ impl<'a> Parser<'a> {
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
         match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Underscore => {
+            token::Ident(name, false) if name == kw::Underscore => {
                 let span = self.span;
                 self.bump();
-                Ok(Ident::new(ident.name, span))
+                Ok(Ident::new(name, span))
             }
             _ => self.parse_ident(),
         }
@@ -2368,13 +2368,11 @@ impl<'a> Parser<'a> {
             }
             let mut recovery_field = None;
-            if let token::Ident(ident, _) = self.token.kind {
+            if let token::Ident(name, _) = self.token.kind {
                 if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
                     // Use in case of error after field-looking code: `S { foo: () with a }`
-                    let mut ident = ident.clone();
-                    ident.span = self.span;
                     recovery_field = Some(ast::Field {
-                        ident,
+                        ident: Ident::new(name, self.span),
                         span: self.span,
                         expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
                         is_shorthand: false,
@@ -2637,7 +2635,7 @@ impl<'a> Parser<'a> {
                     self.look_ahead(1, |t| t.is_ident()) => {
                 self.bump();
                 let name = match self.token.kind {
-                    token::Ident(ident, _) => ident,
+                    token::Ident(name, _) => name,
                     _ => unreachable!()
                 };
                 let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
@@ -2651,7 +2649,7 @@ impl<'a> Parser<'a> {
             // Interpolated identifier and lifetime tokens are replaced with usual identifier
             // and lifetime tokens, so the former are never encountered during normal parsing.
             match **nt {
-                token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span),
+                token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span),
                 token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
                 _ => return,
             }
@@ -2766,7 +2764,7 @@ impl<'a> Parser<'a> {
             let token_cannot_continue_expr = |t: &Token| match t.kind {
                 // These tokens can start an expression after `!`, but
                 // can't continue an expression after an ident
-                token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+                token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
                 token::Literal(..) | token::Pound => true,
                 token::Interpolated(ref nt) => match **nt {
                     token::NtIdent(..) | token::NtExpr(..) |
@@ -4328,7 +4326,7 @@ impl<'a> Parser<'a> {
                            -> PResult<'a, Option<P<Item>>> {
         let token_lo = self.span;
         let (ident, def) = match self.token.kind {
-            token::Ident(ident, false) if ident.name == kw::Macro => {
+            token::Ident(name, false) if name == kw::Macro => {
                 self.bump();
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@@ -4356,8 +4354,8 @@ impl<'a> Parser<'a> {
                 (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
             }
-            token::Ident(ident, _) if ident.name == sym::macro_rules &&
-                                      self.look_ahead(1, |t| *t == token::Not) => {
+            token::Ident(name, _) if name == sym::macro_rules &&
+                                     self.look_ahead(1, |t| *t == token::Not) => {
                 let prev_span = self.prev_span;
                 self.complain_if_pub_macro(&vis.node, prev_span);
                 self.bump();
@@ -5481,8 +5479,8 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token.kind {
             // Preserve hygienic context.
-            token::Ident(ident, _) =>
-                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
+            token::Ident(name, _) =>
+                { let span = this.span; this.bump(); Ident::new(name, span) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
@@ -5805,11 +5803,7 @@ impl<'a> Parser<'a> {
         match *vis {
             VisibilityKind::Inherited => {}
             _ => {
-                let is_macro_rules: bool = match self.token.kind {
-                    token::Ident(sid, _) => sid.name == sym::macro_rules,
-                    _ => false,
-                };
-                let mut err = if is_macro_rules {
+                let mut err = if self.token.is_keyword(sym::macro_rules) {
                     let mut err = self.diagnostic()
                         .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                     err.span_suggestion(
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 81c93a4179e36..ba7c88e700074 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -118,8 +118,8 @@ impl Lit {
     }
 }

-pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -146,11 +146,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
         kw::While,
         kw::Yield,
         kw::Static,
-    ].contains(&ident.name)
+    ].contains(&name)
 }

-fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
-    let ident_token: TokenKind = Ident(ident, is_raw);
+fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool {
+    let ident_token = Token::new(Ident(name, is_raw), span);

     !ident_token.is_reserved_ident() ||
     ident_token.is_path_segment_keyword() ||
@@ -163,7 +163,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
         kw::Extern,
         kw::Typeof,
         kw::Dyn,
-    ].contains(&ident.name)
+    ].contains(&name)
 }

 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
@@ -210,7 +210,7 @@ pub enum TokenKind {
     Literal(Lit),

     /* Name components */
-    Ident(ast::Ident, /* is_raw */ bool),
+    Ident(ast::Name, /* is_raw */ bool),
     Lifetime(ast::Name),
     Interpolated(Lrc<Nonterminal>),

@@ -245,7 +245,7 @@ pub struct Token {
 impl TokenKind {
     /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
-        Ident(ident, ident.is_raw_guess())
+        Ident(ident.name, ident.is_raw_guess())
     }

     crate fn is_like_plus(&self) -> bool {
@@ -254,12 +254,14 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     crate fn can_begin_expr(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw) =>
-                ident_can_begin_expr(ident, is_raw), // value name or keyword
+        match self.kind {
+            Ident(name, is_raw) =>
+                ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
             OpenDelim(..) | // tuple, array or block
             Literal(..)  | // literal
             Not          | // operator not
@@ -289,9 +291,9 @@ impl TokenKind {

     /// Returns `true` if the token can appear at the start of a type.
     crate fn can_begin_type(&self) -> bool {
-        match *self {
-            Ident(ident, is_raw) =>
-                ident_can_begin_type(ident, is_raw), // type name or keyword
+        match self.kind {
+            Ident(name, is_raw) =>
+                ident_can_begin_type(name, self.span, is_raw), // type name or keyword
             OpenDelim(Paren)   | // tuple
             OpenDelim(Bracket) | // array
             Not                | // never
@@ -309,7 +311,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl TokenKind {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
@@ -323,13 +327,17 @@ impl TokenKind {
             _ => self.can_begin_literal_or_bool(),
         }
     }
+}

+impl Token {
     /// Returns `true` if the token can appear at the start of a generic bound.
     crate fn can_begin_bound(&self) -> bool {
         self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
         self == &Question || self == &OpenDelim(Paren)
     }
+}

+impl TokenKind {
     pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
         Literal(Lit::new(kind, symbol, suffix))
     }
@@ -355,8 +363,8 @@ impl TokenKind {
         match *self {
             Literal(..) => true,
             BinOp(Minus) => true,
-            Ident(ident, false) if ident.name == kw::True => true,
-            Ident(ident, false) if ident.name == kw::False => true,
+            Ident(name, false) if name == kw::True => true,
+            Ident(name, false) if name == kw::False => true,
             Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _ => false,
@@ -367,6 +375,18 @@ impl TokenKind {
 }

 impl Token {
+    /// Returns an identifier if this token is an identifier.
+    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+        match self.kind {
+            Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     /// Returns a lifetime identifier if this token is a lifetime.
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match self.kind {
@@ -381,12 +401,12 @@ impl Token {
 }

 impl TokenKind {
-    /// Returns an identifier if this token is an identifier.
-    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
+    /// Returns an identifier name if this token is an identifier.
+    pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> {
         match *self {
-            Ident(ident, is_raw) => Some((ident, is_raw)),
+            Ident(name, is_raw) => Some((name, is_raw)),
             Interpolated(ref nt) => match **nt {
-                NtIdent(ident, is_raw) => Some((ident, is_raw)),
+                NtIdent(ident, is_raw) => Some((ident.name, is_raw)),
                 _ => None,
             },
             _ => None,
@@ -405,7 +425,7 @@ impl TokenKind {
     }
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
-        self.ident().is_some()
+        self.ident_name().is_some()
     }
     /// Returns `true` if the token is a lifetime.
     crate fn is_lifetime(&self) -> bool {
@@ -415,10 +435,7 @@ impl TokenKind {
     /// Returns `true` if the token is a identifier whose name is the given
     /// string slice.
     crate fn is_ident_named(&self, name: Symbol) -> bool {
-        match self.ident() {
-            Some((ident, _)) => ident.name == name,
-            None => false
-        }
+        self.ident_name().map_or(false, |(ident_name, _)| ident_name == name)
     }

     /// Returns `true` if the token is an interpolated path.
@@ -440,24 +457,30 @@ impl TokenKind {
     crate fn is_qpath_start(&self) -> bool {
         self == &Lt || self == &BinOp(Shl)
     }
+}

+impl Token {
     crate fn is_path_start(&self) -> bool {
         self == &ModSep || self.is_qpath_start() || self.is_path() ||
         self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
     }
+}

+impl TokenKind {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: Symbol) -> bool {
-        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
+        self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false)
     }

     pub fn is_path_segment_keyword(&self) -> bool {
-        match self.ident() {
-            Some((id, false)) => id.is_path_segment_keyword(),
+        match self.ident_name() {
+            Some((name, false)) => name.is_path_segment_keyword(),
             _ => false,
         }
     }
+}

+impl Token {
     // Returns true for reserved identifiers used internally for elided lifetimes,
     // unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
@@ -490,7 +513,9 @@ impl TokenKind {
             _ => false,
         }
     }
+}

+impl TokenKind {
     crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
         Some(match self {
             Eq => match joint {
@@ -537,7 +562,7 @@ impl TokenKind {
                 _ => return None,
             },
             SingleQuote => match joint {
-                Ident(ident, false) => Lifetime(Symbol::intern(&format!("'{}", ident))),
+                Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))),
                 _ => return None,
             },

@@ -608,9 +633,9 @@ impl TokenKind {
             (&Literal(a), &Literal(b)) => a == b,

             (&Lifetime(a), &Lifetime(b)) => a == b,
-            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
-                                                       a.name == kw::DollarCrate ||
-                                                       c.name == kw::DollarCrate),
+            (&Ident(a, b), &Ident(c, d)) => b == d && (a == c ||
+                                                       a == kw::DollarCrate ||
+                                                       c == kw::DollarCrate),

             (&Interpolated(_), &Interpolated(_)) => false,

@@ -738,8 +763,7 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Ident(ident, is_raw);
-                Some(TokenTree::token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
@@ -827,7 +851,7 @@ fn prepend_attrs(sess: &ParseSess,
         // For simple paths, push the identifier directly
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
-            let token = Ident(ident, ident.as_str().starts_with("r#"));
+            let token = Ident(ident.name, ident.as_str().starts_with("r#"));
             brackets.push(tokenstream::TokenTree::token(ident.span, token));

         // ... and for more complicated paths, fall back to a reparse hack that
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 140b77b6b5f91..bb80c1a1b3f33 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -575,7 +575,7 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::ast::Ident;
+    use crate::syntax::ast::Name;
     use crate::with_default_globals;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
@@ -660,7 +660,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");

             assert_eq!(test0.is_empty(), true);
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 59f25af374276..8f061abc77b8d 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -38,8 +38,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
-                    res_str.push_str(&ident.as_str()),
+                TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
+                    res_str.push_str(&name.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::any(sp);
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index 0eaac544e332a..c78215b77a973 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -149,16 +149,16 @@ fn parse_args<'a>(
         } // accept trailing commas
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
-            let ident = if let token::Ident(i, _) = p.token.kind {
+            let name = if let token::Ident(name, _) = p.token.kind {
                 p.bump();
-                i
+                name
             } else {
                 return Err(ecx.struct_span_err(
                     p.span,
                     "expected ident, positional arguments cannot follow named arguments",
                 ));
             };
-            let name: &str = &ident.as_str();
+            let name: &str = &name.as_str();
             p.expect(&token::Eq)?;
             let e = p.parse_expr()?;
diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs
index de8b689396fb9..29297aa913ed4 100644
--- a/src/libsyntax_ext/proc_macro_decls.rs
+++ b/src/libsyntax_ext/proc_macro_decls.rs
@@ -132,7 +132,7 @@ impl<'a> CollectProcMacros<'a> {
             }
         };

-        if !trait_ident.can_be_raw() {
+        if !trait_ident.name.can_be_raw() {
             self.handler.span_err(trait_attr.span,
                                   &format!("`{}` cannot be a name of derive macro", trait_ident));
         }
@@ -166,7 +166,7 @@ impl<'a> CollectProcMacros<'a> {
                 return None;
             }
         };
-        if !ident.can_be_raw() {
+        if !ident.name.can_be_raw() {
             self.handler.span_err(
                 attr.span,
                 &format!("`{}` cannot be a name of derive helper attribute", ident),
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index 6ab613d2abd71..ff2835c70f75e 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -142,9 +142,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                 Question => op!('?'),
                 SingleQuote => op!('\''),

-                Ident(ident, false) if ident.name == kw::DollarCrate =>
-                    tt!(Ident::dollar_crate()),
-                Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
+                Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+                Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
                 Lifetime(name) => {
                     let ident = ast::Ident::new(name, span).without_first_quote();
                     stack.push(tt!(Ident::new(ident.name, false)));
@@ -159,7 +158,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                         escaped.extend(ch.escape_debug());
                     }
                     let stream = vec![
-                        Ident(ast::Ident::new(sym::doc, span), false),
+                        Ident(sym::doc, false),
                         Eq,
                         TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                     ]
@@ -211,8 +210,7 @@ impl ToInternal<TokenStream> for TokenTree {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::token(span, token).into();
+                return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -338,7 +336,8 @@ impl Ident {
         if !Self::is_valid(&string) {
             panic!("`{:?}` is not a valid identifier", string)
         }
-        if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() {
+        // Get rid of gensyms to conservatively check rawness on the string contents only.
+        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
             panic!("`{}` cannot be a raw identifier", string);
         }
         Ident { sym, is_raw, span }
diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs
index 4e080d115d2a8..c37aae0bf3184 100644
--- a/src/libsyntax_pos/symbol.rs
+++ b/src/libsyntax_pos/symbol.rs
@@ -1019,6 +1019,21 @@ impl Symbol {
     pub fn is_doc_keyword(self) -> bool {
         self <= kw::Union
     }
+
+    /// A keyword or reserved identifier that can be used as a path segment.
+    pub fn is_path_segment_keyword(self) -> bool {
+        self == kw::Super ||
+        self == kw::SelfLower ||
+        self == kw::SelfUpper ||
+        self == kw::Crate ||
+        self == kw::PathRoot ||
+        self == kw::DollarCrate
+    }
+
+    /// This symbol can be a raw identifier.
+    pub fn can_be_raw(self) -> bool {
+        self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword()
+    }
 }

 impl Ident {
@@ -1049,24 +1064,13 @@ impl Ident {

     /// A keyword or reserved identifier that can be used as a path segment.
     pub fn is_path_segment_keyword(self) -> bool {
-        self.name == kw::Super ||
-        self.name == kw::SelfLower ||
-        self.name == kw::SelfUpper ||
-        self.name == kw::Crate ||
-        self.name == kw::PathRoot ||
-        self.name == kw::DollarCrate
-    }
-
-    /// This identifier can be a raw identifier.
-    pub fn can_be_raw(self) -> bool {
-        self.name != kw::Invalid && self.name != kw::Underscore &&
-        !self.is_path_segment_keyword()
+        self.name.is_path_segment_keyword()
     }

     /// We see this identifier in a normal identifier position, like variable name or a type.
     /// How was it written originally? Did it use the raw form? Let's try to guess.
     pub fn is_raw_guess(self) -> bool {
-        self.can_be_raw() && self.is_reserved()
+        self.name.can_be_raw() && self.is_reserved()
     }
 }

From 350a34f85c0ed53315a2114f0001cfea4fe116d9 Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Wed, 5 Jun 2019 13:24:54 +0300
Subject: [PATCH 10/14] syntax: Use `Token` in some more places

---
 src/libsyntax/attr/mod.rs      | 4 ++--
 src/libsyntax/parse/literal.rs | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 39ffabaa4a948..cc16bac320d0e 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -560,7 +560,7 @@ impl MetaItemKind {
             Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
                 return if let Some(TokenTree::Token(token)) = tokens.next() {
-                    Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue)
+                    Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -605,7 +605,7 @@ impl NestedMetaItem {
         where I: Iterator<Item = TokenTree>,
     {
         if let Some(TokenTree::Token(token)) = tokens.peek() {
-            if let Ok(lit) = Lit::from_token(token, token.span) {
+            if let Ok(lit) = Lit::from_token(token) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 7b27304071c7f..7b213655abdbf 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -228,8 +228,8 @@ impl Lit {
     }

     /// Converts arbitrary token into an AST literal.
-    crate fn from_token(token: &TokenKind, span: Span) -> Result<Lit, LitError> {
-        let lit = match *token {
+    crate fn from_token(token: &Token) -> Result<Lit, LitError> {
+        let lit = match token.kind {
             token::Ident(name, false) if name == kw::True || name == kw::False =>
                 token::Lit::new(token::Bool, name, None),
             token::Literal(lit) =>
@@ -245,7 +245,7 @@ impl Lit {
             _ => return Err(LitError::NotLiteral)
         };

-        Lit::from_lit_token(lit, span)
+        Lit::from_lit_token(lit, token.span)
     }

     /// Attempts to recover an AST literal from semantic literal.
@@ -297,7 +297,7 @@ impl<'a> Parser<'a> {
         }

         let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token, token.span) {
+        match Lit::from_token(token) {
             Ok(lit) => {
                 self.bump();
                 Ok(lit)

From 67ce3f458939e6fe073bca6128526cb23f0797ba Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Wed, 5 Jun 2019 13:25:26 +0300
Subject: [PATCH 11/14] syntax: Switch function parameter order in
 `TokenTree::token`

---
 src/libsyntax/attr/mod.rs              | 10 +++++-----
 src/libsyntax/ext/base.rs              |  5 +++--
 src/libsyntax/ext/expand.rs            |  4 ++--
 src/libsyntax/ext/tt/macro_rules.rs    | 10 +++++-----
 src/libsyntax/ext/tt/quoted.rs         | 10 +++++-----
 src/libsyntax/ext/tt/transcribe.rs     |  8 ++++----
 src/libsyntax/parse/attr.rs            |  2 +-
 src/libsyntax/parse/literal.rs         |  2 +-
 src/libsyntax/parse/mod.rs             | 14 +++++++-------
 src/libsyntax/parse/parser.rs          | 14 +++++++-------
 src/libsyntax/parse/token.rs           |  8 ++++----
 src/libsyntax/tokenstream.rs           | 18 +++++++++---------
 src/libsyntax_ext/assert.rs            |  2 +-
 src/libsyntax_ext/deriving/custom.rs   |  2 +-
 src/libsyntax_ext/proc_macro_server.rs | 16 ++++++++--------
 15 files changed, 63 insertions(+), 62 deletions(-)

diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index cc16bac320d0e..8c9bed57bfdfd 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -465,10 +465,10 @@ impl MetaItem {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
             }
-            idents.push(TokenTree::token(segment.ident.span,
-                                         TokenKind::from_ast_ident(segment.ident)).into());
+            idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
+                                         segment.ident.span).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -532,7 +532,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -540,7 +540,7 @@ impl MetaItemKind {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(token::Comma, span).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 3b24837e36598..61c736662c71e 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -268,8 +268,9 @@ impl<F> TTMacroExpander for F
             if let tokenstream::TokenTree::Token(token) = tt {
                 if let token::Interpolated(nt) = &token.kind {
                     if let token::NtIdent(ident, is_raw) = **nt {
-                        *tt = tokenstream::TokenTree::token(ident.span,
-                                                            token::Ident(ident.name, is_raw));
+                        *tt = tokenstream::TokenTree::token(
+                            token::Ident(ident.name, is_raw), ident.span
+                        );
                     }
                 }
             }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 4396b9be9bbb0..7cd847eac4690 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                }))).into();
+                })), DUMMY_SP).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 77f53c35b0b5e..d25339a78f43c 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
        quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                         }

                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                         }

                         assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
                     &new
                 } else {
                     &suffix_first
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 582d87b911dbc..b4672fb4a58b7 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -40,7 +40,7 @@ impl Delimited {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(open_span, self.open_token())
+        TokenTree::token(self.open_token(), open_span)
     }

     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +50,7 @@ impl Delimited {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(close_span, self.close_token())
+        TokenTree::token(self.close_token(), close_span)
     }
 }

@@ -153,7 +153,7 @@ impl TokenTree {
         }
     }

-    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 }

@@ -325,7 +325,7 @@ where
                 let (ident, is_raw) = token.ident().unwrap();
                 let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
@@ -342,7 +342,7 @@ where
                 }

                 // There are no more tokens. Just return the `$` we already have.
-                None => TokenTree::token(span, token::Dollar),
+                None => TokenTree::token(token::Dollar, span),
             },

             // `tree` is an arbitrary token. Keep it.
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 1dbb0638df195..b382893ce4ece 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -119,7 +119,7 @@ pub fn transcribe(
                         Some((tt, _)) => tt.span(),
                         None => DUMMY_SP,
                     };
-                    result.push(TokenTree::token(prev_span, sep).into());
+                    result.push(TokenTree::token(sep, prev_span).into());
                 }
                 continue;
             }
@@ -225,7 +225,7 @@ pub fn transcribe(
                         result.push(tt.clone().into());
                     } else {
                         sp = sp.apply_mark(cx.current_expansion.mark);
-                        let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
+                        let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                         result.push(token.into());
                     }
                 } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::token(sp, token::Dollar).into());
-                    result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 8040168a67ec3..d83b76f4d2366 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
                 self.check(&token::OpenDelim(DelimToken::Brace)) {
                     self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(token::Eq, self.prev_span);
                 let mut is_interpolated_expr = false;
                 if let token::Interpolated(nt) = &self.token.kind {
                     if let token::NtExpr(..) = **nt {
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 7b213655abdbf..4979a4dd27f4a 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -261,7 +261,7 @@ impl Lit {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(self.span, token).into()
+        TokenTree::token(token, self.span).into()
     }
 }

diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 2b82767d7e91d..8d3518d037368 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -476,23 +476,23 @@ mod tests {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
+                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                        TokenTree::token(token::Colon, sp(8, 9)).into(),
+                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                        TokenTree::token(token::Semi, sp(18, 19)).into(),
                     ]).into(),
                 ).into()
             ]);
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 57a49d1524d81..e9e908eb858c8 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -362,11 +362,11 @@ impl TokenCursor {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(sym::doc, false)),
-                TokenTree::token(sp, token::Eq),
-                TokenTree::token(sp, token::TokenKind::lit(
-                    token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-                )),
+                TokenTree::token(token::Ident(sym::doc, false), sp),
+                TokenTree::token(token::Eq, sp),
+                TokenTree::token(token::TokenKind::lit(
+                    token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
+                ), sp),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
@@ -375,10 +375,10 @@ impl TokenCursor {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
+                [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::token(sp, token::Pound), body]
+                [TokenTree::token(token::Pound, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
                 };
                 TokenStream::new(vec![
                     args.into(),
-                    TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                    TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                     body.into(),
                 ])
             } else {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index ba7c88e700074..58c30a07e3e1b 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -763,10 +763,10 @@ impl Nonterminal {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -852,7 +852,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));

         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -866,7 +866,7 @@ fn prepend_attrs(sess: &ParseSess,
        // The span we list here for `#` and for `[ ... ]` are both wrong in
        // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index bb80c1a1b3f33..b4643229285cd 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -138,7 +138,7 @@ impl TokenTree {
         TokenStream::new(vec![(self, Joint)])
     }

-    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }

@@ -149,7 +149,7 @@ impl TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::token(open_span, token::OpenDelim(delim))
+        TokenTree::token(token::OpenDelim(delim), open_span)
     }

     /// Returns the closing delimiter as a token tree.
@@ -159,7 +159,7 @@ impl TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::token(close_span, token::CloseDelim(delim))
+        TokenTree::token(token::CloseDelim(delim), close_span)
     }
 }

@@ -212,7 +212,7 @@ impl TokenStream {
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -433,7 +433,7 @@ impl TokenStreamBuilder {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
                     let glued_span = last_token.span.to(token.span);
-                    let glued_tt = TokenTree::token(glued_span, glued_tok);
+                    let glued_tt = TokenTree::token(glued_tok, glued_span);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -660,7 +660,7 @@ mod tests {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
             let test2 = string_to_ts("foo(bar::baz)");

             assert_eq!(test0.is_empty(), true);
@@ -673,9 +673,9 @@ mod tests {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
-            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
-            builder.push(TokenTree::token(sp(2, 3), token::Dot));
+            builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(2, 3)));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index e5e422c4d9c77..ce1e3276af39b 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -30,11 +30,11 @@ pub fn expand_assert<'cx>(
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
             TokenStream::from(TokenTree::token(
-                DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
+                DUMMY_SP,
             ))
         }).into(),
         delim: MacDelimiter::Parenthesis,
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
index 3deab97db88c0..98465d75e4680 100644
--- a/src/libsyntax_ext/deriving/custom.rs
+++ b/src/libsyntax_ext/deriving/custom.rs
@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
         MarkAttrs(&self.attrs).visit_item(&item);

         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();

         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index ff2835c70f75e..00a420d3fa899 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -163,7 +163,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                         TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                     ]
                     .into_iter()
-                    .map(|kind| tokenstream::TokenTree::token(span, kind))
+                    .map(|kind| tokenstream::TokenTree::token(kind, span))
                     .collect();
                     stack.push(TokenTree::Group(Group {
                         delimiter: Delimiter::Bracket,
@@ -210,7 +210,7 @@ impl ToInternal<TokenStream> for TokenTree {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into();
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -219,8 +219,8 @@ impl ToInternal<TokenStream> for TokenTree {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, integer);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -230,12 +230,12 @@ impl ToInternal<TokenStream> for TokenTree {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, float);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
             }
         };

@@ -265,7 +265,7 @@ impl ToInternal<TokenStream> for TokenTree {
             _ => unreachable!(),
        };

-        let tree = tokenstream::TokenTree::token(span, kind);
+        let tree = tokenstream::TokenTree::token(kind, span);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }

From 738e14565deb48800c06abc22f8e35e412f10010 Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Wed, 5 Jun 2019 13:54:54 +0300
Subject: [PATCH 12/14] syntax: Use `Token` in visitors and fix a mut visitor
 test

---
 src/librustc/hir/map/def_collector.rs       |  6 +++---
 src/librustc_resolve/build_reduced_graph.rs |  6 +++---
 src/libsyntax/mut_visit.rs                  | 24 ++++++++++++++-------
 src/libsyntax/visit.rs                      |  6 +++---
 4 files changed, 25 insertions(+), 17 deletions(-)

diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs
index b9a80ebb78f20..41073773e9f9b 100644
--- a/src/librustc/hir/map/def_collector.rs
+++ b/src/librustc/hir/map/def_collector.rs
@@ -6,7 +6,7 @@ use syntax::ast::*;
 use syntax::ext::hygiene::Mark;
 use syntax::visit;
 use syntax::symbol::{kw, sym};
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
 use syntax_pos::Span;

 /// Creates `DefId`s for nodes in the AST.
@@ -325,8 +325,8 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
         }
     }

-    fn visit_token(&mut self, t: TokenKind) {
-        if let token::Interpolated(nt) = t {
+    fn visit_token(&mut self, t: Token) {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index a7a78a69952f4..6d0b142fb2409 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -34,7 +34,7 @@ use syntax::ext::base::Determinacy::Undetermined;
 use syntax::ext::hygiene::Mark;
 use syntax::ext::tt::macro_rules;
 use syntax::feature_gate::is_builtin_attr;
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
 use syntax::span_err;
 use syntax::std_inject::injected_crate_name;
 use syntax::symbol::{kw, sym};
@@ -1052,8 +1052,8 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
         self.resolver.current_module = parent;
     }

-    fn visit_token(&mut self, t: TokenKind) {
-        if let token::Interpolated(nt) = t {
+    fn visit_token(&mut self, t: Token) {
+        if let token::Interpolated(nt) = t.kind {
             if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 7eb88de2281da..4d7f0a97b0fa5 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -9,7 +9,7 @@

 use crate::ast::*;
 use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::ThinVec;
 use crate::tokenstream::*;
@@ -262,7 +262,7 @@ pub trait MutVisitor: Sized {
         noop_visit_tts(tts, self);
     }

-    fn visit_token(&mut self, t: &mut TokenKind) {
+    fn visit_token(&mut self, t: &mut Token) {
         noop_visit_token(t, self);
     }

@@ -576,9 +576,8 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)
 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(Token { kind, span }) => {
-            vis.visit_token(kind);
-            vis.visit_span(span);
+        TokenTree::Token(token) => {
+            vis.visit_token(token);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
@@ -595,15 +594,24 @@ pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &m
     })
 }

-// apply ident visitor if it's an ident, apply other visits to interpolated nodes
-pub fn noop_visit_token<T: MutVisitor>(t: &mut TokenKind, vis: &mut T) {
-    match t {
+// Apply ident visitor if it's an ident, apply other visits to interpolated nodes.
+// In practice the ident part is not actually used by specific visitors right now,
+// but there's a test below checking that it works.
+pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
+    let Token { kind, span } = t;
+    match kind {
+        token::Ident(name, _) | token::Lifetime(name) => {
+            let mut ident = Ident::new(*name, *span);
+            vis.visit_ident(&mut ident);
+            *name = ident.name;
+        }
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
             vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }
+    vis.visit_span(span);
 }

 /// Apply visitor to elements of interpolated nodes.
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index e32c5f3f3ecad..4e6a8274a478c 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -14,7 +14,7 @@
 //! those that are created by the expansion of a macro.
 use crate::ast::*;
-use crate::parse::token::TokenKind;
+use crate::parse::token::Token;
 use crate::tokenstream::{TokenTree, TokenStream};
 use syntax_pos::Span;
@@ -151,7 +151,7 @@ pub trait Visitor<'ast>: Sized {
     fn visit_tts(&mut self, tts: TokenStream) {
         walk_tts(self, tts)
     }
-    fn visit_token(&mut self, _t: TokenKind) {}
+    fn visit_token(&mut self, _t: Token) {}
     // FIXME: add `visit_interpolated` and `walk_interpolated`
     fn visit_vis(&mut self, vis: &'ast Visibility) {
         walk_vis(self, vis)
@@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
-        TokenTree::Token(token) => visitor.visit_token(token.kind),
+        TokenTree::Token(token) => visitor.visit_token(token),
         TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }

From ff40e37b98fb44366a329d1b0d9642d462cc6ab6 Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Wed, 5 Jun 2019 14:17:56 +0300
Subject: [PATCH 13/14] Some code cleanup and tidy/test fixes

---
 .../src/language-features/plugin.md          |  6 +-
 src/librustc_lint/builtin.rs                 | 12 +---
 src/libsyntax/attr/mod.rs                    |  9 ++-
 src/libsyntax/diagnostics/plugin.rs          |  4 +-
 src/libsyntax/early_buffered_lints.rs        |  2 +-
 src/libsyntax/ext/tt/quoted.rs               | 41 +++++++-------
 src/libsyntax/ext/tt/transcribe.rs           |  2 +-
 src/libsyntax/lib.rs                         |  6 --
 src/libsyntax/parse/diagnostics.rs           | 15 ++---
 src/libsyntax/parse/literal.rs               |  3 +-
 src/libsyntax/parse/mod.rs                   | 19 +++++--
 src/libsyntax/parse/parser.rs                | 55 ++++++++++---------
 src/libsyntax/parse/token.rs                 |  3 +-
 src/libsyntax/tokenstream.rs                 |  5 +-
 src/libsyntax_ext/assert.rs                  |  3 +-
 src/libsyntax_pos/symbol.rs                  |  5 +-
 .../auxiliary/roman-numerals.rs              | 20 +++----
 17 files changed, 109 insertions(+), 101 deletions(-)

diff --git a/src/doc/unstable-book/src/language-features/plugin.md b/src/doc/unstable-book/src/language-features/plugin.md
index 43fffd680372f..1994cf491889b 100644
--- a/src/doc/unstable-book/src/language-features/plugin.md
+++ b/src/doc/unstable-book/src/language-features/plugin.md
@@ -56,7 +56,7 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;

-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
@@ -64,7 +64,7 @@ use syntax_pos::Span;
 use rustc_plugin::Registry;

 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {

     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }

     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs
index a3da97bd5db1e..6e4d0e881f76b 100644
--- a/src/librustc_lint/builtin.rs
+++ b/src/librustc_lint/builtin.rs
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(token) => match token.ident() {
-                    // only report non-raw idents
-                    Some((ident, false)) => {
-                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: token.span.substitute_dummy(ident.span),
-                            ..ident
-                        });
-                    }
-                    _ => {},
+                // Only report non-raw idents.
+                TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
+                    self.check_ident_token(cx, UnderMacro(true), ident);
                 }
                 TokenTree::Delimited(_, _, tts) => {
                     self.check_tokens(cx, tts)
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index 8c9bed57bfdfd..edfe097c72f61 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -483,7 +483,8 @@ impl MetaItem {
             Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
             Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
                 let mut segments = if let token::Ident(name, _) = kind {
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
@@ -493,12 +494,14 @@ impl MetaItem {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
+                            = tokens.next() {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 8d9848d98fb21..9f01b9b9f9b58 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -77,7 +77,9 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         },
         (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
-           Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
+           Some(&TokenTree::Token(Token {
+                kind: token::Literal(token::Lit { symbol, .. }), ..
+           }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs
index 977e6d4587709..598c8459d1590 100644
--- a/src/libsyntax/early_buffered_lints.rs
+++ b/src/libsyntax/early_buffered_lints.rs
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
-use crate::syntax::ast::NodeId;
+use crate::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index b4672fb4a58b7..ec7d7f705d893 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -24,12 +24,12 @@ pub struct Delimited {
 
 impl Delimited {
     /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::TokenKind {
+    pub fn open_token(&self) -> TokenKind {
         token::OpenDelim(self.delim)
     }
 
     /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::TokenKind {
+    pub fn close_token(&self) -> TokenKind {
         token::CloseDelim(self.delim)
     }
 
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::TokenKind>,
+    pub separator: Option<TokenKind>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -210,20 +210,21 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
-                            Some((kind, _)) => {
-                                let span = token.span.with_lo(start_sp.lo());
-                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                continue;
-                            }
-                            _ => token.span,
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+                        match trees.next() {
+                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                Some((kind, _)) => {
+                                    let span = token.span.with_lo(start_sp.lo());
+                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    continue;
+                                }
+                                _ => token.span,
+                            },
+                            tree => tree
+                                .as_ref()
+                                .map(tokenstream::TokenTree::span)
+                                .unwrap_or(span),
                         },
-                        tree => tree
-                            .as_ref()
-                            .map(tokenstream::TokenTree::span)
-                            .unwrap_or(span),
-                    },
                     tree => tree
                         .as_ref()
                         .map(tokenstream::TokenTree::span)
                         .unwrap_or(span),
@@ -370,7 +371,7 @@ where
 
 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
+fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
     match *token {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
@@ -423,7 +424,7 @@ fn parse_sep_and_kleene_op(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -448,7 +449,7 @@ fn parse_sep_and_kleene_op_2015(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -566,7 +567,7 @@ fn parse_sep_and_kleene_op_2018(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index b382893ce4ece..90a9cc8f34d2d 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -242,7 +242,7 @@ pub fn transcribe(
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
                     result.push(TokenTree::token(token::Dollar, sp).into());
-                    result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
+                    result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 6882586ed2cd2..c69364d4e19bb 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -137,12 +137,6 @@ pub mod util {
 
 pub mod json;
 
-pub mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-    pub use crate::ast;
-}
-
 pub mod ast;
 pub mod attr;
 pub mod source_map;
diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs
index 7830b2ce880c2..7f0bf4a90508b 100644
--- a/src/libsyntax/parse/diagnostics.rs
+++ b/src/libsyntax/parse/diagnostics.rs
@@ -2,8 +2,9 @@ use crate::ast::{
     self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
     Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
 };
-use crate::parse::{SeqSep, token, PResult, Parser};
+use crate::parse::{SeqSep, PResult, Parser};
 use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
+use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::Spanned;
@@ -229,8 +230,8 @@ impl<'a> Parser<'a> {
 
     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::TokenKind],
-        inedible: &[token::TokenKind],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -368,7 +369,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         let handler = self.diagnostic();
 
         if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ impl<'a> Parser<'a> {
     /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                    ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -726,7 +727,7 @@ impl<'a> Parser<'a> {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::TokenKind,
+        t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
         let token_str = pprust::token_to_string(t);
         let this_token_str = self.this_token_descr();
@@ -903,7 +904,7 @@ impl<'a> Parser<'a> {
 
     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::TokenKind],
+        tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs
index 4979a4dd27f4a..7d5356ffe4d8d 100644
--- a/src/libsyntax/parse/literal.rs
+++ b/src/libsyntax/parse/literal.rs
@@ -272,7 +272,8 @@ impl<'a> Parser<'a> {
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`.
             recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = t.kind {
                     let next_span = self.look_ahead_span(1);
                     if self.span.hi() == next_span.lo() {
                         let s = String::from("0.") + &symbol.as_str();
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 8d3518d037368..063823bbf4d11 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::parse::parser::emit_unclosed_delims;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
 use crate::print::pprust::token_to_string;
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
 /// A sequence separator.
 pub struct SeqSep {
     /// The separator token.
-    pub sep: Option<token::TokenKind>,
+    pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
 impl SeqSep {
-    pub fn trailing_allowed(t: token::TokenKind) -> SeqSep {
+    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
         SeqSep {
             sep: Some(t),
             trailing_sep_allowed: true,
@@ -426,7 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name_macro_rules, false), ..
+                })),
                 Some(&TokenTree::Token(Token { kind: token::Not, .. })),
                 Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
@@ -446,7 +449,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if first_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,7 +461,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if second_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index e9e908eb858c8..51bfe3527cf4d 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -38,7 +38,7 @@ use crate::source_map::{self, SourceMap, Spanned, respan};
 use crate::parse::{SeqSep, classify, literal, token};
 use crate::parse::lexer::UnmatchedBrace;
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use crate::parse::token::{Token, DelimToken};
+use crate::parse::token::{Token, TokenKind, DelimToken};
 use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use crate::util::parser::{AssocOp, Fixity};
 use crate::print::pprust;
@@ -337,8 +337,8 @@ impl TokenCursor {
     }
 
     fn next_desugared(&mut self) -> Token {
-        let (sp, name) = match self.next() {
-            Token { span, kind: token::DocComment(name) } => (span, name),
+        let (name, sp) = match self.next() {
+            Token { kind: token::DocComment(name), span } => (name, span),
             tok => return tok,
         };
 
@@ -364,7 +364,7 @@ impl TokenCursor {
             [
                 TokenTree::token(token::Ident(sym::doc, false), sp),
                 TokenTree::token(token::Eq, sp),
-                TokenTree::token(token::TokenKind::lit(
+                TokenTree::token(TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                 ), sp),
             ]
@@ -389,7 +389,7 @@ impl TokenCursor {
 
 #[derive(Clone, PartialEq)]
 crate enum TokenType {
-    Token(token::TokenKind),
+    Token(TokenKind),
     Keyword(Symbol),
     Operator,
     Lifetime,
@@ -419,7 +419,7 @@ impl TokenType {
 ///
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
-fn can_continue_type_after_non_fn_ident(t: &token::TokenKind) -> bool {
+fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
     t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl)
 }
 
@@ -565,7 +565,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &token::TokenKind) -> PResult<'a, bool /* recovered */> {
+    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
@@ -583,8 +583,8 @@ impl<'a> Parser<'a> {
     /// anything. Signal a fatal error if next token is unexpected.
     pub fn expect_one_of(
         &mut self,
-        edible: &[token::TokenKind],
-        inedible: &[token::TokenKind],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         if edible.contains(&self.token) {
             self.bump();
@@ -646,14 +646,14 @@ impl<'a> Parser<'a> {
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
     /// encountered.
-    crate fn check(&mut self, tok: &token::TokenKind) -> bool {
+    crate fn check(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.token == *tok;
         if !is_present {
             self.expected_tokens.push(TokenType::Token(tok.clone()));
         }
         is_present
     }
 
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
-    pub fn eat(&mut self, tok: &token::TokenKind) -> bool {
+    pub fn eat(&mut self, tok: &TokenKind) -> bool {
         let is_present = self.check(tok);
         if is_present { self.bump() }
         is_present
@@ -889,7 +889,7 @@ impl<'a> Parser<'a> {
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     pub fn parse_seq_to_end(&mut self,
-                            ket: &token::TokenKind,
+                            ket: &TokenKind,
                             sep: SeqSep,
                             f: F)
                             -> PResult<'a, Vec<T>> where
@@ -907,7 +907,7 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     pub fn parse_seq_to_before_end(
         &mut self,
-        ket: &token::TokenKind,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, (Vec<T>, bool)>
@@ -918,7 +918,7 @@ impl<'a> Parser<'a> {
 
     crate fn parse_seq_to_before_tokens(
         &mut self,
-        kets: &[&token::TokenKind],
+        kets: &[&TokenKind],
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: F,
@@ -992,8 +992,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_unspanned_seq(
         &mut self,
-        bra: &token::TokenKind,
-        ket: &token::TokenKind,
+        bra: &TokenKind,
+        ket: &TokenKind,
         sep: SeqSep,
         f: F,
     ) -> PResult<'a, Vec<T>> where
@@ -1036,7 +1036,7 @@ impl<'a> Parser<'a> {
 
     /// Advance the parser using provided token as a next one. Use this when
     /// consuming a part of a token. For example a single `<` from `<<`.
-    fn bump_with(&mut self, next: token::TokenKind, span: Span) {
+    fn bump_with(&mut self, next: TokenKind, span: Span) {
         self.prev_span = self.span.with_hi(span.lo());
         // It would be incorrect to record the kind of the current token, but
        // fortunately for tokens currently using `bump_with`, the
@@ -1050,7 +1050,6 @@ impl<'a> Parser<'a> {
         F: FnOnce(&Token) -> R,
     {
         if dist == 0 {
-            // FIXME: Avoid cloning here.
             return f(&self.token);
         }
 
         f(&match frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
                 TokenTree::Token(token) => token,
-                TokenTree::Delimited(dspan, delim, _) => Token::new(token::OpenDelim(delim), dspan.open),
+                TokenTree::Delimited(dspan, delim, _) =>
+                    Token::new(token::OpenDelim(delim), dspan.open),
             }
             None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
         })
@@ -1768,7 +1768,7 @@ impl<'a> Parser<'a> {
     fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
 
-        let is_args_start = |token: &token::TokenKind| match *token {
+        let is_args_start = |token: &TokenKind| match *token {
             token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) |
             token::LArrow => true,
             _ => false,
@@ -1864,7 +1864,8 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_field_name(&mut self) -> PResult<'a, Ident> {
-        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind {
+        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
+                self.token.kind {
             self.expect_no_suffix(self.span, "a tuple index", suffix);
             self.bump();
             Ok(Ident::new(symbol, self.prev_span))
@@ -2649,8 +2650,10 @@ impl<'a> Parser<'a> {
         // Interpolated identifier and lifetime tokens are replaced with usual identifier
         // and lifetime tokens, so the former are never encountered during normal parsing.
         match **nt {
-            token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span),
-            token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
+            token::NtIdent(ident, is_raw) =>
+                Token::new(token::Ident(ident.name, is_raw), ident.span),
+            token::NtLifetime(ident) =>
+                Token::new(token::Lifetime(ident.name), ident.span),
             _ => return,
         }
     }
@@ -4481,7 +4484,9 @@ impl<'a> Parser<'a> {
             // We used to incorrectly stop parsing macro-expanded statements here.
             // If the next token will be an error anyway but could have parsed with the
             // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-            else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token.kind {
+            else if macro_legacy_warnings &&
+                    self.token.can_begin_expr() &&
+                    match self.token.kind {
                 // These can continue an expression, so we can't stop parsing and warn.
                 token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
                 token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -6409,7 +6414,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Given a termination token, parses all of the items in a module.
-    fn parse_mod_items(&mut self, term: &token::TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
+    fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
         let mut items = vec![];
         while let Some(item) = self.parse_item()? {
             items.push(item);
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 58c30a07e3e1b..28a733728bf7b 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -5,11 +5,10 @@ pub use LitKind::*;
 pub use TokenKind::*;
 
 use crate::ast::{self};
-use crate::parse::ParseSess;
+use crate::parse::{parse_stream_from_source_str, ParseSess};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::syntax::parse::parse_stream_from_source_str;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
 use syntax_pos::symbol::Symbol;
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index b4643229285cd..9dea3a4dcc144 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -203,7 +203,8 @@ impl TokenStream {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
                         (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
+                        ((TokenTree::Token(token_left), NonJoint),
+                         (TokenTree::Token(token_right), _))
                         if ((token_left.is_ident() && !token_left.is_reserved_ident())
                                 || token_left.is_lit()) &&
                             ((token_right.is_ident() && !token_right.is_reserved_ident())
@@ -575,7 +576,7 @@ impl DelimSpan {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::syntax::ast::Name;
+    use crate::ast::Name;
     use crate::with_default_globals;
     use crate::util::parser_testing::string_to_stream;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index ce1e3276af39b..3886528c74c2f 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -103,7 +103,8 @@ fn parse_assert<'a>(
     //
     // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
     // turned into an error.
-    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token.kind {
+    let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. })
+            = parser.token.kind {
         let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
         let comma_span = cx.source_map().next_point(parser.prev_span);
         err.span_suggestion_short(
diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs
index c37aae0bf3184..5dd4d6566ed1c 100644
--- a/src/libsyntax_pos/symbol.rs
+++ b/src/libsyntax_pos/symbol.rs
@@ -921,10 +921,9 @@ pub struct Interner {
 impl Interner {
     fn prefill(init: &[&'static str]) -> Self {
-        let symbols = (0 .. init.len() as u32).map(Symbol::new);
         Interner {
-            strings: init.to_vec(),
-            names: init.iter().copied().zip(symbols).collect(),
+            strings: init.into(),
+            names: init.iter().copied().zip((0..).map(Symbol::new)).collect(),
             ..Default::default()
         }
     }
diff --git a/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs b/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
index 216c81ca34ce5..4d9e0129e54db 100644
--- a/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/roman-numerals.rs
@@ -1,3 +1,9 @@
+// WARNING WARNING WARNING WARNING WARNING
+// =======================================
+//
+// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
+// Please keep the two copies in sync! FIXME: have rustdoc read this file
+
 // force-host
 
 #![crate_type="dylib"]
@@ -8,21 +14,15 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
-use syntax::ext::build::AstBuilder;  // trait for expr_usize
+use syntax::ext::build::AstBuilder;  // A trait for expr_usize.
 use syntax_pos::Span;
 use rustc_plugin::Registry;
 
-// WARNING WARNING WARNING WARNING WARNING
-// =======================================
-//
-// This code also appears in src/doc/unstable-book/src/language-features/plugin.md.
-// Please keep the two copies in sync! FIXME: have rustdoc read this file
-
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -38,7 +38,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);

From 3a31f0634bb1669eae64e83f595942986f867125 Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Wed, 5 Jun 2019 22:04:52 +0300
Subject: [PATCH 14/14] Address review comments

---
 src/libsyntax/ext/tt/macro_rules.rs | 21 +++++++-------
 src/libsyntax/mut_visit.rs          |  2 ++
 src/libsyntax/parse/parser.rs       | 12 ++++++----
 3 files changed, 19 insertions(+), 16 deletions(-)

diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index d25339a78f43c..7ab51c1eb20c9 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -130,9 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
     }
 
     // Which arm's failure should we report? (the one furthest along)
-    let mut best_fail_spot = DUMMY_SP;
-    let mut best_fail_tok = None;
-    let mut best_fail_text = None;
+    let mut best_failure: Option<(Token, &str)> = None;
 
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
@@ -190,21 +188,20 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                     arm_span,
                 })
             }
-            Failure(token, msg) => if token.span.lo() >= best_fail_spot.lo() {
-                best_fail_spot = token.span;
-                best_fail_tok = Some(token.kind);
-                best_fail_text = Some(msg);
-            },
+            Failure(token, msg) => match best_failure {
+                Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
+                _ => best_failure = Some((token, msg))
+            }
             Error(err_sp, ref msg) => {
                 cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
             }
         }
     }
 
-    let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
-    let span = best_fail_spot.substitute_dummy(sp);
-    let mut err = cx.struct_span_err(span, &best_fail_msg);
-    err.span_label(span, best_fail_text.unwrap_or(&best_fail_msg));
+    let (token, label) = best_failure.expect("ran no matchers");
+    let span = token.span.substitute_dummy(sp);
+    let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
+    err.span_label(span, label);
     if let Some(sp) = def_span {
         if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
             err.span_label(cx.source_map().def_span(sp), "when calling this macro");
diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs
index 4d7f0a97b0fa5..d2a614c4a54ac 100644
--- a/src/libsyntax/mut_visit.rs
+++ b/src/libsyntax/mut_visit.rs
@@ -604,6 +604,8 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
             let mut ident = Ident::new(*name, *span);
             vis.visit_ident(&mut ident);
             *name = ident.name;
+            *span = ident.span;
+            return; // avoid visiting the span for the second time
         }
         token::Interpolated(nt) => {
             let mut nt = Lrc::make_mut(nt);
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 51bfe3527cf4d..43e7c9330e418 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -197,13 +197,17 @@ enum PrevTokenKind {
 #[derive(Clone)]
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
-    /// The current token.
+    /// The current normalized token.
+    /// "Normalized" means that some interpolated tokens
+    /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced
+    /// with non-interpolated identifier and lifetime tokens they refer to.
+    /// Perhaps the normalized / non-normalized setup can be simplified somehow.
     pub token: Token,
-    /// The span of the previous token.
+    /// Span of the current non-normalized token.
     meta_var_span: Option<Span>,
-    /// The span of the previous token.
+    /// Span of the previous non-normalized token.
     pub prev_span: Span,
-    /// The previous token kind.
+    /// Kind of the previous normalized token (in simplified form).
     prev_token_kind: PrevTokenKind,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.