Skip to content

Commit

Permalink
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
Browse files Browse the repository at this point in the history
syntax: Keep token span as a part of `Token`

In a world with proc macros and edition hygiene, a `Token` without a span is not self-contained.
In practice this means that tokens and spans are always stored and passed somewhere along with each other.
This PR combines them into a single struct by performing the following renamings/replacements:

- `Token` -> `TokenKind`
- `TokenAndSpan` -> `Token`
- `(Token, Span)` -> `Token`

Some later commits (fb6e2fe and 1cdee86) remove duplicate spans in `token::Ident` and `token::Lifetime`.
Those spans were supposed to be identical to token spans, but could easily go out of sync, as was noticed in #60965 (comment).
The `(Token, Span)` -> `Token` change is a soft prerequisite for this de-duplication, since it allows avoiding some larger churn (passing spans to most of the functions that classify identifiers).
  • Loading branch information
bors committed Jun 7, 2019
2 parents c5295ac + 3a31f06 commit ca1bcfd
Show file tree
Hide file tree
Showing 43 changed files with 978 additions and 959 deletions.
6 changes: 3 additions & 3 deletions src/doc/unstable-book/src/language-features/plugin.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,15 +56,15 @@ extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use syntax::parse::token;
use syntax::parse::token::{self, Token};
use syntax::tokenstream::TokenTree;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder; // A trait for expr_usize.
use syntax_pos::Span;
use rustc_plugin::Registry;
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-> Box<MacResult + 'static> {
-> Box<dyn MacResult + 'static> {
static NUMERALS: &'static [(&'static str, usize)] = &[
("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
Expand All @@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}
let text = match args[0] {
TokenTree::Token(_, token::Ident(s)) => s.to_string(),
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
Expand Down
14 changes: 7 additions & 7 deletions src/librustc/hir/lowering.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
use syntax::std_inject;
use syntax::symbol::{kw, sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::parse::token::Token;
use syntax::parse::token::{self, Token};
use syntax::visit::{self, Visitor};
use syntax_pos::{DUMMY_SP, edition, Span};

Expand Down Expand Up @@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> {

fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(span, token) => self.lower_token(token, span),
TokenTree::Token(token) => self.lower_token(token),
TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
delim,
Expand All @@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> {
}
}

fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
match token {
Token::Interpolated(nt) => {
let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
self.lower_token_stream(tts)
}
other => TokenTree::Token(span, other).into(),
_ => TokenTree::Token(token).into(),
}
}

Expand Down
2 changes: 1 addition & 1 deletion src/librustc/hir/map/def_collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
}

fn visit_token(&mut self, t: Token) {
if let Token::Interpolated(nt) = t {
if let token::Interpolated(nt) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ExprKind::Mac(..) = expr.node {
self.visit_macro_invoc(expr.id);
Expand Down
124 changes: 64 additions & 60 deletions src/librustc/ich/impls_syntax.rs
Original file line number Diff line number Diff line change
Expand Up @@ -261,9 +261,8 @@ for tokenstream::TokenTree {
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
tokenstream::TokenTree::Token(span, ref token) => {
span.hash_stable(hcx, hasher);
hash_token(token, hcx, hasher);
tokenstream::TokenTree::Token(ref token) => {
token.hash_stable(hcx, hasher);
}
tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
span.hash_stable(hcx, hasher);
Expand Down Expand Up @@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit {
suffix
});

fn hash_token<'a, 'gcx, W: StableHasherResult>(
token: &token::Token,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>,
) {
mem::discriminant(token).hash_stable(hcx, hasher);
match *token {
token::Token::Eq |
token::Token::Lt |
token::Token::Le |
token::Token::EqEq |
token::Token::Ne |
token::Token::Ge |
token::Token::Gt |
token::Token::AndAnd |
token::Token::OrOr |
token::Token::Not |
token::Token::Tilde |
token::Token::At |
token::Token::Dot |
token::Token::DotDot |
token::Token::DotDotDot |
token::Token::DotDotEq |
token::Token::Comma |
token::Token::Semi |
token::Token::Colon |
token::Token::ModSep |
token::Token::RArrow |
token::Token::LArrow |
token::Token::FatArrow |
token::Token::Pound |
token::Token::Dollar |
token::Token::Question |
token::Token::SingleQuote |
token::Token::Whitespace |
token::Token::Comment |
token::Token::Eof => {}

token::Token::BinOp(bin_op_token) |
token::Token::BinOpEq(bin_op_token) => {
std_hash::Hash::hash(&bin_op_token, hasher);
}
impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
token::Eq |
token::Lt |
token::Le |
token::EqEq |
token::Ne |
token::Ge |
token::Gt |
token::AndAnd |
token::OrOr |
token::Not |
token::Tilde |
token::At |
token::Dot |
token::DotDot |
token::DotDotDot |
token::DotDotEq |
token::Comma |
token::Semi |
token::Colon |
token::ModSep |
token::RArrow |
token::LArrow |
token::FatArrow |
token::Pound |
token::Dollar |
token::Question |
token::SingleQuote |
token::Whitespace |
token::Comment |
token::Eof => {}

token::BinOp(bin_op_token) |
token::BinOpEq(bin_op_token) => {
std_hash::Hash::hash(&bin_op_token, hasher);
}

token::Token::OpenDelim(delim_token) |
token::Token::CloseDelim(delim_token) => {
std_hash::Hash::hash(&delim_token, hasher);
}
token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
token::OpenDelim(delim_token) |
token::CloseDelim(delim_token) => {
std_hash::Hash::hash(&delim_token, hasher);
}
token::Literal(lit) => lit.hash_stable(hcx, hasher),

token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
is_raw.hash_stable(hcx, hasher);
}
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
token::Ident(name, is_raw) => {
name.hash_stable(hcx, hasher);
is_raw.hash_stable(hcx, hasher);
}
token::Lifetime(name) => name.hash_stable(hcx, hasher),

token::Token::Interpolated(_) => {
bug!("interpolated tokens should not be present in the HIR")
}
token::Interpolated(_) => {
bug!("interpolated tokens should not be present in the HIR")
}

token::Token::DocComment(val) |
token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
token::DocComment(val) |
token::Shebang(val) => val.hash_stable(hcx, hasher),
}
}
}

impl_stable_hash_for!(struct token::Token {
kind,
span
});

impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
MetaItem(meta_item),
Literal(lit)
Expand Down
12 changes: 3 additions & 9 deletions src/librustc_lint/builtin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1414,15 +1414,9 @@ impl KeywordIdents {
fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
for tt in tokens.into_trees() {
match tt {
TokenTree::Token(span, tok) => match tok.ident() {
// only report non-raw idents
Some((ident, false)) => {
self.check_ident_token(cx, UnderMacro(true), ast::Ident {
span: span.substitute_dummy(ident.span),
..ident
});
}
_ => {},
// Only report non-raw idents.
TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
self.check_ident_token(cx, UnderMacro(true), ident);
}
TokenTree::Delimited(_, _, tts) => {
self.check_tokens(cx, tts)
Expand Down
2 changes: 1 addition & 1 deletion src/librustc_resolve/build_reduced_graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
}

fn visit_token(&mut self, t: Token) {
if let Token::Interpolated(nt) = t {
if let token::Interpolated(nt) = t.kind {
if let token::NtExpr(ref expr) = *nt {
if let ast::ExprKind::Mac(..) = expr.node {
self.visit_invoc(expr.id);
Expand Down
22 changes: 11 additions & 11 deletions src/librustc_save_analysis/span_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use crate::generated_code;
use std::cell::Cell;

use syntax::parse::lexer::{self, StringReader};
use syntax::parse::token::{self, Token};
use syntax::parse::token::{self, TokenKind};
use syntax_pos::*;

#[derive(Clone)]
Expand Down Expand Up @@ -56,15 +56,15 @@ impl<'a> SpanUtils<'a> {
lexer::StringReader::retokenize(&self.sess.parse_sess, span)
}

pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
let mut toks = self.retokenise_span(span);
loop {
let next = toks.real_token();
if next.tok == token::Eof {
if next == token::Eof {
return None;
}
if next.tok == tok {
return Some(next.sp);
if next == tok {
return Some(next.span);
}
}
}
Expand All @@ -74,12 +74,12 @@ impl<'a> SpanUtils<'a> {
// let mut toks = self.retokenise_span(span);
// loop {
// let ts = toks.real_token();
// if ts.tok == token::Eof {
// if ts == token::Eof {
// return None;
// }
// if ts.tok == token::Not {
// if ts == token::Not {
// let ts = toks.real_token();
// if ts.tok.is_ident() {
// if ts.kind.is_ident() {
// return Some(ts.sp);
// } else {
// return None;
Expand All @@ -93,12 +93,12 @@ impl<'a> SpanUtils<'a> {
// let mut toks = self.retokenise_span(span);
// let mut prev = toks.real_token();
// loop {
// if prev.tok == token::Eof {
// if prev == token::Eof {
// return None;
// }
// let ts = toks.real_token();
// if ts.tok == token::Not {
// if prev.tok.is_ident() {
// if ts == token::Not {
// if prev.kind.is_ident() {
// return Some(prev.sp);
// } else {
// return None;
Expand Down
Loading

0 comments on commit ca1bcfd

Please sign in to comment.