diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index edfe097c72f61..b5d9b761773b4 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -735,9 +735,9 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) - raw_attr.clone(), ); - let start_span = parser.span; + let start_span = parser.token.span; let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted()); - let end_span = parser.span; + let end_span = parser.token.span; if parser.token != token::Eof { parse_sess.span_diagnostic .span_err(start_span.to(end_span), "invalid crate attribute"); diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index fc413caa428dd..6123e95ccf821 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -121,7 +121,7 @@ impl<'a> StripUnconfigured<'a> { let mut expanded_attrs = Vec::with_capacity(1); while !parser.check(&token::CloseDelim(token::Paren)) { - let lo = parser.span.lo(); + let lo = parser.token.span.lo(); let (path, tokens) = parser.parse_meta_item_unrestricted()?; expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo))); parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 7cd847eac4690..9960539555332 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1041,7 +1041,7 @@ impl<'a> Parser<'a> { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); // Avoid emitting backtrace info twice. 
- let def_site_span = self.span.with_ctxt(SyntaxContext::empty()); + let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); err.span_label(span, "caused by the macro expansion here"); let msg = format!( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index e1cb90d9e71d6..4e2aab46542d2 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstrea while self.p.token != token::Eof { match panictry!(self.p.parse_item()) { Some(item) => ret.push(item), - None => self.p.diagnostic().span_fatal(self.p.span, + None => self.p.diagnostic().span_fatal(self.p.token.span, &format!("expected item, found `{}`", self.p.this_token_to_string())) .raise() diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 82cc9e8ac2280..f98e1433356c2 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -675,7 +675,7 @@ pub fn parse( // // This MatcherPos instance is allocated on the stack. All others -- and // there are frequently *no* others! -- are allocated on the heap. 
- let mut initial = initial_matcher_pos(ms, parser.span); + let mut initial = initial_matcher_pos(ms, parser.token.span); let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)]; let mut next_items = Vec::new(); @@ -721,15 +721,15 @@ pub fn parse( return nameize(sess, ms, matches); } else if eof_items.len() > 1 { return Error( - parser.span, + parser.token.span, "ambiguity: multiple successful parses".to_string(), ); } else { return Failure( - Token::new(token::Eof, if parser.span.is_dummy() { - parser.span + Token::new(token::Eof, if parser.token.span.is_dummy() { + parser.token.span } else { - sess.source_map().next_point(parser.span) + sess.source_map().next_point(parser.token.span) }), "missing tokens in macro arguments", ); @@ -753,7 +753,7 @@ pub fn parse( .join(" or "); return Error( - parser.span, + parser.token.span, format!( "local ambiguity: multiple parsing options: {}", match next_items.len() { @@ -927,7 +927,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal { sym::ty => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) { - let span = p.span; + let span = p.token.span; p.bump(); token::NtIdent(Ident::new(name, span), is_raw) } else { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 7ab51c1eb20c9..4998129fdee51 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -47,7 +47,7 @@ impl<'a> ParserAnyMacro<'a> { let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| { if parser.token == token::Eof && e.message().ends_with(", found ``") { if !e.span.is_dummy() { // early end of macro arm (#52866) - e.replace_span_with(parser.sess.source_map().next_point(parser.span)); + e.replace_span_with(parser.sess.source_map().next_point(parser.token.span)); } let msg = &e.message[0]; e.message[0] = ( @@ -63,7 
+63,7 @@ impl<'a> ParserAnyMacro<'a> { if parser.sess.source_map().span_to_filename(arm_span).is_real() { e.span_label(arm_span, "in this macro arm"); } - } else if !parser.sess.source_map().span_to_filename(parser.span).is_real() { + } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() { e.span_label(site_span, "in this macro invocation"); } e diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index d83b76f4d2366..77a87e26e60d5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -39,7 +39,7 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span); if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ @@ -83,7 +83,7 @@ impl<'a> Parser<'a> { self.token); let (span, path, tokens, style) = match self.token.kind { token::Pound => { - let lo = self.span; + let lo = self.token.span; self.bump(); if let InnerAttributeParsePolicy::Permitted = inner_parse_policy { @@ -93,7 +93,7 @@ impl<'a> Parser<'a> { self.bump(); if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy { - let span = self.span; + let span = self.token.span; self.diagnostic() .struct_span_err(span, reason) .note("inner attributes, like `#![no_std]`, annotate the item \ @@ -201,7 +201,7 @@ impl<'a> Parser<'a> { } token::DocComment(s) => { // we need to get the position of this token before we bump. 
- let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span); if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); @@ -249,7 +249,7 @@ impl<'a> Parser<'a> { return Ok(meta); } - let lo = self.span; + let lo = self.token.span; let path = self.parse_path(PathStyle::Mod)?; let node = self.parse_meta_item_kind()?; let span = lo.to(self.prev_span); @@ -284,7 +284,7 @@ impl<'a> Parser<'a> { let found = self.this_token_to_string(); let msg = format!("expected unsuffixed literal or identifier, found `{}`", found); - Err(self.diagnostic().struct_span_err(self.span, &msg)) + Err(self.diagnostic().struct_span_err(self.token.span, &msg)) } /// matches meta_seq = ( COMMASEP(meta_item_inner) ) diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 7f0bf4a90508b..c4db9a9df45a9 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -162,7 +162,7 @@ impl RecoverQPath for Expr { impl<'a> Parser<'a> { pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { - self.span_fatal(self.span, m) + self.span_fatal(self.token.span, m) } pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { @@ -174,7 +174,7 @@ impl<'a> Parser<'a> { } pub fn bug(&self, m: &str) -> ! 
{ - self.sess.span_diagnostic.span_bug(self.span, m) + self.sess.span_diagnostic.span_bug(self.token.span, m) } pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { @@ -199,13 +199,13 @@ impl<'a> Parser<'a> { crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err( - self.span, + self.token.span, &format!("expected identifier, found {}", self.this_token_descr()), ); if let token::Ident(name, false) = self.token.kind { - if Ident::new(name, self.span).is_raw_guess() { + if Ident::new(name, self.token.span).is_raw_guess() { err.span_suggestion( - self.span, + self.token.span, "you can escape reserved keywords to use them as identifiers", format!("r#{}", name), Applicability::MaybeIncorrect, @@ -213,12 +213,12 @@ impl<'a> Parser<'a> { } } if let Some(token_descr) = self.token_descr() { - err.span_label(self.span, format!("expected identifier, found {}", token_descr)); + err.span_label(self.token.span, format!("expected identifier, found {}", token_descr)); } else { - err.span_label(self.span, "expected identifier"); + err.span_label(self.token.span, "expected identifier"); if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { err.span_suggestion( - self.span, + self.token.span, "remove this comma", String::new(), Applicability::MachineApplicable, @@ -277,11 +277,11 @@ impl<'a> Parser<'a> { (self.sess.source_map().next_point(self.prev_span), format!("expected {} here", expect))) }; - self.last_unexpected_token_span = Some(self.span); + self.last_unexpected_token_span = Some(self.token.span); let mut err = self.fatal(&msg_exp); if self.token.is_ident_named(sym::and) { err.span_suggestion_short( - self.span, + self.token.span, "use `&&` instead of `and` for the boolean operator", "&&".to_string(), Applicability::MaybeIncorrect, @@ -289,7 +289,7 @@ impl<'a> Parser<'a> { } if self.token.is_ident_named(sym::or) { err.span_suggestion_short( - self.span, + self.token.span, "use `||` instead of `or` for the boolean operator", 
"||".to_string(), Applicability::MaybeIncorrect, @@ -326,7 +326,7 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::While) ); let cm = self.sess.source_map(); - match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { + match (cm.lookup_line(self.token.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { // The spans are in different lines, expected `;` and found `let` or `return`. // High likelihood that it is only a missing `;`. @@ -352,16 +352,16 @@ impl<'a> Parser<'a> { // | -^^^^^ unexpected token // | | // | expected one of 8 possible tokens here - err.span_label(self.span, label_exp); + err.span_label(self.token.span, label_exp); } _ if self.prev_span == syntax_pos::DUMMY_SP => { // Account for macro context where the previous span might not be // available to avoid incorrect output (#54841). - err.span_label(self.span, "unexpected token"); + err.span_label(self.token.span, "unexpected token"); } _ => { err.span_label(sp, label_exp); - err.span_label(self.span, "unexpected token"); + err.span_label(self.token.span, "unexpected token"); } } Err(err) @@ -429,7 +429,7 @@ impl<'a> Parser<'a> { // Keep the span at the start so we can highlight the sequence of `>` characters to be // removed. - let lo = self.span; + let lo = self.token.span; // We need to look-ahead to see if we have `>` characters without moving the cursor forward // (since we might have the field access case and the characters we're eating are @@ -474,7 +474,7 @@ impl<'a> Parser<'a> { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. 
self.eat_to_tokens(&[&end]); - let span = lo.until(self.span); + let span = lo.until(self.token.span); let plural = number_of_gt > 1 || number_of_shr >= 1; self.diagnostic() @@ -502,7 +502,7 @@ impl<'a> Parser<'a> { match lhs.node { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // respan to include both operators - let op_span = op.span.to(self.span); + let op_span = op.span.to(self.token.span); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node == BinOpKind::Lt && @@ -734,15 +734,15 @@ impl<'a> Parser<'a> { let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. (token::Eof, Some(_)) => { - let sp = self.sess.source_map().next_point(self.span); + let sp = self.sess.source_map().next_point(self.token.span); (sp, sp) } // We don't want to point at the following span after DUMMY_SP. // This happens when the parser finds an empty TokenStream. - _ if self.prev_span == DUMMY_SP => (self.span, self.span), + _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span), // EOF, don't want to point at the following char, but rather the last token. - (token::Eof, None) => (self.prev_span, self.span), - _ => (self.sess.source_map().next_point(self.prev_span), self.span), + (token::Eof, None) => (self.prev_span, self.token.span), + _ => (self.sess.source_map().next_point(self.prev_span), self.token.span), }; let msg = format!( "expected `{}`, found {}", @@ -789,7 +789,7 @@ impl<'a> Parser<'a> { // interpreting `await { }?` as `?.await`. 
self.parse_block_expr( None, - self.span, + self.token.span, BlockCheckMode::Default, ThinVec::new(), ) @@ -819,9 +819,9 @@ impl<'a> Parser<'a> { self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) { // future.await() - let lo = self.span; + let lo = self.token.span; self.bump(); // ( - let sp = lo.to(self.span); + let sp = lo.to(self.token.span); self.bump(); // ) self.struct_span_err(sp, "incorrect use of `await`") .span_suggestion( @@ -854,7 +854,7 @@ impl<'a> Parser<'a> { next_sp: Span, maybe_path: bool, ) { - err.span_label(self.span, "expecting a type here because of type ascription"); + err.span_label(self.token.span, "expecting a type here because of type ascription"); let cm = self.sess.source_map(); let next_pos = cm.lookup_char_pos(next_sp.lo()); let op_pos = cm.lookup_char_pos(cur_op_span.hi()); @@ -911,7 +911,7 @@ impl<'a> Parser<'a> { // we want to use the last closing delim that would apply for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) - && Some(self.span) > unmatched.unclosed_span + && Some(self.token.span) > unmatched.unclosed_span { pos = Some(i); } @@ -1070,28 +1070,28 @@ impl<'a> Parser<'a> { crate fn expected_semi_or_open_brace(&mut self) -> PResult<'a, ast::TraitItem> { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); - err.span_label(self.span, "expected `;` or `{`"); + err.span_label(self.token.span, "expected `;` or `{`"); Err(err) } crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { if let token::DocComment(_) = self.token.kind { let mut err = self.diagnostic().struct_span_err( - self.span, + self.token.span, &format!("documentation comments cannot be applied to {}", applied_to), ); - err.span_label(self.span, "doc comments are not allowed here"); + err.span_label(self.token.span, "doc comments are not allowed here"); err.emit(); self.bump(); } else if 
self.token == token::Pound && self.look_ahead(1, |t| { *t == token::OpenDelim(token::Bracket) }) { - let lo = self.span; + let lo = self.token.span; // Skip every token until next possible arg. while self.token != token::CloseDelim(token::Bracket) { self.bump(); } - let sp = lo.to(self.span); + let sp = lo.to(self.token.span); self.bump(); let mut err = self.diagnostic().struct_span_err( sp, @@ -1217,16 +1217,16 @@ impl<'a> Parser<'a> { crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { - let sp = self.sess.source_map().next_point(self.span); + let sp = self.sess.source_map().next_point(self.token.span); (sp, format!("expected expression, found end of {}", origin)) } - _ => (self.span, format!( + _ => (self.token.span, format!( "expected expression, found {}", self.this_token_descr(), )), }; let mut err = self.struct_span_err(span, &msg); - let sp = self.sess.source_map().start_point(self.span); + let sp = self.sess.source_map().start_point(self.token.span); if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { self.sess.expr_parentheses_needed(&mut err, *sp, None); } diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 7d5356ffe4d8d..cf869a1ce0fd9 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -271,14 +271,13 @@ impl<'a> Parser<'a> { let mut recovered = None; if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. 
- recovered = self.look_ahead(1, |t| { + recovered = self.look_ahead(1, |next_token| { if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) - = t.kind { - let next_span = self.look_ahead_span(1); - if self.span.hi() == next_span.lo() { + = next_token.kind { + if self.token.span.hi() == next_token.span.lo() { let s = String::from("0.") + &symbol.as_str(); let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.span.to(next_span))); + return Some(Token::new(kind, self.token.span.to(next_token.span))); } } None diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 063823bbf4d11..1d708d39a1379 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -239,8 +239,8 @@ fn maybe_source_file_to_parser( let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; - if parser.token == token::Eof && parser.span.is_dummy() { - parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt()); + if parser.token == token::Eof && parser.token.span.is_dummy() { + parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); } Ok(parser) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 43e7c9330e418..3acd708814560 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -34,6 +34,7 @@ use crate::ast::{BinOpKind, UnOp}; use crate::ast::{RangeEnd, RangeSyntax}; use crate::{ast, attr}; use crate::ext::base::DummyResult; +use crate::ext::hygiene::SyntaxContext; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::parse::{SeqSep, classify, literal, token}; use crate::parse::lexer::UnmatchedBrace; @@ -57,7 +58,6 @@ use log::debug; use std::borrow::Cow; use std::cmp; use std::mem; -use std::ops::Deref; use std::path::{self, Path, PathBuf}; use std::slice; @@ -132,12 +132,16 @@ 
macro_rules! maybe_whole_expr { token::NtPath(path) => { let path = path.clone(); $p.bump(); - return Ok($p.mk_expr($p.span, ExprKind::Path(None, path), ThinVec::new())); + return Ok($p.mk_expr( + $p.token.span, ExprKind::Path(None, path), ThinVec::new() + )); } token::NtBlock(block) => { let block = block.clone(); $p.bump(); - return Ok($p.mk_expr($p.span, ExprKind::Block(block, None), ThinVec::new())); + return Ok($p.mk_expr( + $p.token.span, ExprKind::Block(block, None), ThinVec::new() + )); } _ => {}, }; @@ -246,15 +250,6 @@ impl<'a> Drop for Parser<'a> { } } -// FIXME: Parser uses `self.span` all the time. -// Remove this impl if you think that using `self.token.span` instead is acceptable. -impl Deref for Parser<'_> { - type Target = Token; - fn deref(&self) -> &Self::Target { - &self.token - } -} - #[derive(Clone)] crate struct TokenCursor { crate frame: TokenCursorFrame, @@ -514,8 +509,9 @@ impl<'a> Parser<'a> { if let Some(directory) = directory { parser.directory = directory; - } else if !parser.span.is_dummy() { - if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) { + } else if !parser.token.span.is_dummy() { + if let FileName::Real(mut path) = + sess.source_map().span_to_unmapped_path(parser.token.span) { path.pop(); parser.directory.path = Cow::from(path); } @@ -596,7 +592,7 @@ impl<'a> Parser<'a> { } else if inedible.contains(&self.token) { // leave it in the input Ok(false) - } else if self.last_unexpected_token_span == Some(self.span) { + } else if self.last_unexpected_token_span == Some(self.token.span) { FatalError.raise(); } else { self.expected_one_of_not_found(edible, inedible) @@ -632,7 +628,7 @@ impl<'a> Parser<'a> { return Err(err); } } - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -748,7 +744,7 @@ impl<'a> Parser<'a> { true } token::BinOpEq(token::Plus) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = 
self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::Eq, span); true } @@ -779,7 +775,7 @@ impl<'a> Parser<'a> { Ok(()) } token::AndAnd => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::And), span)) } _ => self.unexpected() @@ -796,7 +792,7 @@ impl<'a> Parser<'a> { Ok(()) } token::OrOr => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::Or), span)) } _ => self.unexpected() @@ -821,12 +817,12 @@ impl<'a> Parser<'a> { true } token::BinOp(token::Shl) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::Lt, span); true } token::LArrow => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::BinOp(token::Minus), span); true } @@ -861,15 +857,15 @@ impl<'a> Parser<'a> { Some(()) } token::BinOp(token::Shr) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Gt, span)) } token::BinOpEq(token::Shr) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Ge, span)) } token::Ge => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Eq, span)) } _ => None, @@ -1018,7 +1014,7 @@ impl<'a> Parser<'a> { self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } - self.prev_span = self.meta_var_span.take().unwrap_or(self.span); + self.prev_span = 
self.meta_var_span.take().unwrap_or(self.token.span); // Record last token kind for possible error recovery. self.prev_token_kind = match self.token.kind { @@ -1041,7 +1037,7 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. fn bump_with(&mut self, next: TokenKind, span: Span) { - self.prev_span = self.span.with_hi(span.lo()); + self.prev_span = self.token.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. @@ -1068,18 +1064,6 @@ impl<'a> Parser<'a> { }) } - crate fn look_ahead_span(&self, dist: usize) -> Span { - if dist == 0 { - return self.span - } - - match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { - Some(TokenTree::Token(token)) => token.span, - Some(TokenTree::Delimited(span, ..)) => span.entire(), - None => self.look_ahead_span(dist - 1), - } - } - /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) @@ -1171,7 +1155,7 @@ impl<'a> Parser<'a> { fn parse_trait_item_(&mut self, at_end: &mut bool, mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> { - let lo = self.span; + let lo = self.token.span; self.eat_bad_pub(); let (name, node, generics) = if self.eat_keyword(kw::Type) { self.parse_trait_item_assoc_ty()? @@ -1204,7 +1188,7 @@ impl<'a> Parser<'a> { // definition... // We don't allow argument names to be left off in edition 2018. 
- p.parse_arg_general(p.span.rust_2018(), true, false) + p.parse_arg_general(p.token.span.rust_2018(), true, false) })?; generics.where_clause = self.parse_where_clause()?; @@ -1268,7 +1252,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?)) } else { - Ok(FunctionRetTy::Default(self.span.shrink_to_lo())) + Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo())) } } @@ -1292,7 +1276,7 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery); maybe_whole!(self, NtTy, |x| x); - let lo = self.span; + let lo = self.token.span; let mut impl_dyn_multi = false; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. @@ -1376,7 +1360,7 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.span; + let lo = self.token.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? 
@@ -1391,7 +1375,7 @@ impl<'a> Parser<'a> { impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds) } else if self.check_keyword(kw::Dyn) && - (self.span.rust_2018() || + (self.token.span.rust_2018() || self.look_ahead(1, |t| t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t))) { self.bump(); // `dyn` @@ -1604,9 +1588,9 @@ impl<'a> Parser<'a> { crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); - let minus_lo = self.span; + let minus_lo = self.token.span; let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.span; + let lo = self.token.span; let literal = self.parse_lit()?; let hi = self.prev_span; let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); @@ -1623,7 +1607,7 @@ impl<'a> Parser<'a> { fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { token::Ident(name, _) if name.is_path_segment_keyword() => { - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -1634,7 +1618,7 @@ impl<'a> Parser<'a> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { token::Ident(name, false) if name == kw::Underscore => { - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -1662,11 +1646,11 @@ impl<'a> Parser<'a> { // span in the case of something like `<T>::Bar`. 
let (mut path, path_span); if self.eat_keyword(kw::As) { - let path_lo = self.span; + let path_lo = self.token.span; path = self.parse_path(PathStyle::Type)?; path_span = path_lo.to(self.prev_span); } else { - path_span = self.span.to(self.span); + path_span = self.token.span.to(self.token.span); path = ast::Path { segments: Vec::new(), span: path_span }; } @@ -1704,9 +1688,9 @@ impl<'a> Parser<'a> { path }); - let lo = self.meta_var_span.unwrap_or(self.span); + let lo = self.meta_var_span.unwrap_or(self.token.span); let mut segments = Vec::new(); - let mod_sep_ctxt = self.span.ctxt(); + let mod_sep_ctxt = self.token.span.ctxt(); if self.eat(&token::ModSep) { segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); } @@ -1797,7 +1781,7 @@ impl<'a> Parser<'a> { // Generic arguments are found - `<`, `(`, `::<` or `::(`. self.eat(&token::ModSep); - let lo = self.span; + let lo = self.token.span; let args = if self.eat_lt() { // `<'a, T, A = U>` let (args, constraints) = @@ -1840,17 +1824,17 @@ impl<'a> Parser<'a> { /// Parses a single lifetime `'a` or panics. crate fn expect_lifetime(&mut self) -> Lifetime { if let Some(ident) = self.token.lifetime() { - let span = self.span; + let span = self.token.span; self.bump(); Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID } } else { - self.span_bug(self.span, "not a lifetime") + self.span_bug(self.token.span, "not a lifetime") } } fn eat_label(&mut self) -> Option