
Some code cleanup and tidy/test fixes

petrochenkov committed Jun 5, 2019
1 parent 738e145 commit ff40e37b98fb44366a329d1b0d9642d462cc6ab6
@@ -56,15 +56,15 @@ extern crate syntax_pos;
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::tokenstream::TokenTree;
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax::ext::build::AstBuilder; // A trait for expr_usize.
 use syntax_pos::Span;
 use rustc_plugin::Registry;
 
 fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
-        -> Box<MacResult + 'static> {
+        -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
         ("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
+        TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
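Reviewer note: the two hunks above capture the core shape of this refactor — `Token` is now a struct carrying a `kind` (and a span), so call sites destructure `Token { kind, .. }` instead of matching a separate span/kind pair. A minimal standalone sketch of that pattern, using mock types rather than the real libsyntax ones:

```rust
// Mock types standing in for libsyntax's Token/TokenTree, just to
// illustrate the struct-field matching introduced above.
#[allow(dead_code)]
enum TokenKind { Ident(String), Comma }
struct Span;
#[allow(dead_code)]
struct Token { kind: TokenKind, span: Span }
enum TokenTree { Token(Token) }

// New style: destructure the `kind` field and ignore the span, as in
// `TokenTree::Token(Token { kind: token::Ident(s, _), .. })` above.
fn ident_text(tt: &TokenTree) -> Option<&str> {
    match tt {
        TokenTree::Token(Token { kind: TokenKind::Ident(s), .. }) => Some(s.as_str()),
        TokenTree::Token(_) => None,
    }
}

fn main() {
    let tt = TokenTree::Token(Token { kind: TokenKind::Ident("rn".into()), span: Span });
    assert_eq!(ident_text(&tt), Some("rn"));
}
```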
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(token) => match token.ident() {
-                    // only report non-raw idents
-                    Some((ident, false)) => {
-                        self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: token.span.substitute_dummy(ident.span),
-                            ..ident
-                        });
-                    }
-                    _ => {},
-                }
+                // Only report non-raw idents.
+                TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
+                    self.check_ident_token(cx, UnderMacro(true), ident);
+                }
                 TokenTree::Delimited(_, _, tts) => {
                     self.check_tokens(cx, tts)
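Reviewer note: this hunk collapses a one-armed `match` into `if let` and passes the ident straight through instead of rebuilding it with a substituted span. The simplification in isolation (hypothetical `check` function, not the real lint API):

```rust
fn check(ident: &str) { println!("checking `{}`", ident); }

fn main() {
    let token_ident: Option<(&str, bool)> = Some(("dyn", false));

    // Before: a match with a single interesting arm.
    match token_ident {
        Some((ident, false)) => check(ident),
        _ => {}
    }

    // After: the equivalent, flatter `if let`.
    if let Some((ident, false)) = token_ident {
        check(ident);
    }
}
```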
@@ -483,7 +483,8 @@ impl MetaItem {
             Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
             Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
                 let mut segments = if let token::Ident(name, _) = kind {
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
@@ -493,12 +494,14 @@ impl MetaItem {
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
+                            = tokens.next() {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. }))
+                            = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
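Reviewer note: the loop being rewrapped here alternates `next()`/`peek()` to collect `a::b::c` into path segments — consume an identifier, then consume a `::` only if one is pending. The same peek-driven shape on a plain `Peekable` iterator, with a toy token type rather than libsyntax's:

```rust
// Toy tokens: identifiers and `::` separators.
enum Tok { Ident(&'static str), ModSep }

// Collects `Ident (ModSep Ident)*` into segments, mirroring the
// next()/peek() loop in the hunk above; None on a malformed path.
fn parse_path(tokens: Vec<Tok>) -> Option<Vec<&'static str>> {
    let mut tokens = tokens.into_iter().peekable();
    let mut segments = Vec::new();
    loop {
        if let Some(Tok::Ident(name)) = tokens.next() {
            segments.push(name);
        } else {
            return None;
        }
        if let Some(Tok::ModSep) = tokens.peek() {
            tokens.next();
        } else {
            break;
        }
    }
    Some(segments)
}

fn main() {
    let path = vec![Tok::Ident("std"), Tok::ModSep, Tok::Ident("mem")];
    assert_eq!(parse_path(path), Some(vec!["std", "mem"]));
}
```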
@@ -77,7 +77,9 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         },
         (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })),
             Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
-            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
+            Some(&TokenTree::Token(Token {
+                kind: token::Literal(token::Lit { symbol, .. }), ..
+            }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
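Reviewer note: this arm uses the tuple-of-`get`s idiom that also appears in the test hunks below — pair the slice length with `get(i)` options so one `match` can destructure a fixed token shape. A standalone sketch with a mock token type:

```rust
enum Tok { Ident(&'static str), Comma, Literal(&'static str) }

// Matches exactly `ident , literal`, the same shape as the
// `(3, Some(..), Some(..), Some(..))` arm above.
fn code_and_description(toks: &[Tok]) -> Option<(&str, &str)> {
    match (toks.len(), toks.get(0), toks.get(1), toks.get(2)) {
        (3, Some(Tok::Ident(code)), Some(Tok::Comma), Some(Tok::Literal(descr))) => {
            Some((*code, *descr))
        }
        _ => None,
    }
}

fn main() {
    let toks = [Tok::Ident("E0506"), Tok::Comma, Tok::Literal("cannot assign")];
    assert_eq!(code_and_description(&toks), Some(("E0506", "cannot assign")));
}
```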
@@ -3,7 +3,7 @@
 //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat
 //! redundant. Later, these types can be converted to types for use by the rest of the compiler.
 
-use crate::syntax::ast::NodeId;
+use crate::ast::NodeId;
 use syntax_pos::MultiSpan;
 
 /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be
@@ -24,12 +24,12 @@ pub struct Delimited {
 
 impl Delimited {
     /// Returns the opening delimiter (possibly `NoDelim`).
-    pub fn open_token(&self) -> token::TokenKind {
+    pub fn open_token(&self) -> TokenKind {
         token::OpenDelim(self.delim)
     }
 
     /// Returns the closing delimiter (possibly `NoDelim`).
-    pub fn close_token(&self) -> token::TokenKind {
+    pub fn close_token(&self) -> TokenKind {
         token::CloseDelim(self.delim)
     }
 
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
     /// The sequence of token trees
     pub tts: Vec<TokenTree>,
     /// The optional separator
-    pub separator: Option<token::TokenKind>,
+    pub separator: Option<TokenKind>,
     /// Whether the sequence can be repeated zero (*), or one or more times (+)
     pub op: KleeneOp,
     /// The number of `Match`s that appear in the sequence (and subsequences)
@@ -210,20 +210,21 @@ pub fn parse(
             match tree {
                 TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                     let span = match trees.next() {
-                        Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
-                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
-                                Some((kind, _)) => {
-                                    let span = token.span.with_lo(start_sp.lo());
-                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                                    continue;
-                                }
-                                _ => token.span,
-                            },
-                            tree => tree
-                                .as_ref()
-                                .map(tokenstream::TokenTree::span)
-                                .unwrap_or(span),
-                        },
+                        Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) =>
+                            match trees.next() {
+                                Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
+                                    Some((kind, _)) => {
+                                        let span = token.span.with_lo(start_sp.lo());
+                                        result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                        continue;
+                                    }
+                                    _ => token.span,
+                                },
+                                tree => tree
+                                    .as_ref()
+                                    .map(tokenstream::TokenTree::span)
+                                    .unwrap_or(span),
+                            },
                         tree => tree
                             .as_ref()
                             .map(tokenstream::TokenTree::span)
@@ -370,7 +371,7 @@ where
 
 /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
 /// `None`.
-fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
+fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
     match *token {
         token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
         token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
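Reviewer note: `kleene_op` (now taking a bare `&TokenKind`) maps the three repetition tokens onto an enum. A hedged, self-contained rendering of that mapping with stand-in types (the real `TokenKind` wraps `*` and `+` in `BinOp`, simplified here):

```rust
// Stand-ins for the real TokenKind/KleeneOp (shapes simplified).
enum TokenKind { Star, Plus, Question, Comma }

#[derive(Debug, PartialEq)]
enum KleeneOp { ZeroOrMore, OneOrMore, ZeroOrOne }

// `Some(op)` for `*`, `+`, `?`; `None` for anything else,
// matching the contract documented on `kleene_op` above.
fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
    match token {
        TokenKind::Star => Some(KleeneOp::ZeroOrMore),
        TokenKind::Plus => Some(KleeneOp::OneOrMore),
        TokenKind::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

fn main() {
    assert_eq!(kleene_op(&TokenKind::Star), Some(KleeneOp::ZeroOrMore));
    assert_eq!(kleene_op(&TokenKind::Comma), None);
}
```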
@@ -423,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
     attrs: &[ast::Attribute],
     edition: Edition,
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -448,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
     _features: &Features,
     _attrs: &[ast::Attribute],
     macro_node_id: NodeId,
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -566,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
     sess: &ParseSess,
     _features: &Features,
     _attrs: &[ast::Attribute],
-) -> (Option<token::TokenKind>, KleeneOp)
+) -> (Option<TokenKind>, KleeneOp)
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
@@ -242,7 +242,7 @@ pub fn transcribe(
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
                     result.push(TokenTree::token(token::Dollar, sp).into());
-                    result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
+                    result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
 
@@ -137,12 +137,6 @@ pub mod util {
 
 pub mod json;
 
-pub mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-    pub use crate::ast;
-}
-
 pub mod ast;
 pub mod attr;
 pub mod source_map;
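Reviewer note: the deleted `pub mod syntax` was a facade that re-exported sibling modules so paths like `crate::syntax::ast::NodeId` resolved inside libsyntax itself; the import hunks elsewhere in this commit rewrite those paths to the direct `crate::ast::...` form. A toy crate showing why both spellings worked before the removal:

```rust
// Toy layout: with the facade, both `crate::ast::Id` and
// `crate::syntax::ast::Id` name the same item.
pub mod ast {
    pub struct Id(pub u32);
}

pub mod syntax {
    pub use crate::ast; // the indirection removed by this commit
}

fn main() {
    let a = crate::ast::Id(1); // direct path (kept)
    let b = crate::syntax::ast::Id(2); // facade path (now gone)
    println!("{} {}", a.0, b.0);
}
```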
@@ -2,8 +2,9 @@ use crate::ast::{
     self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
     Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData,
 };
-use crate::parse::{SeqSep, token, PResult, Parser};
+use crate::parse::{SeqSep, PResult, Parser};
 use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType};
+use crate::parse::token::{self, TokenKind};
 use crate::print::pprust;
 use crate::ptr::P;
 use crate::source_map::Spanned;
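Reviewer note: this import rewrite is what makes the bare `TokenKind` spellings in the hunks below compile — `use crate::parse::token::{self, TokenKind};` brings in both the `token` module (for `token::...` paths) and the `TokenKind` type itself. A toy demonstration of the `{self, Type}` form:

```rust
mod parse {
    pub mod token {
        #[derive(Debug)]
        pub enum TokenKind { Comma, Dot }
    }
}

// One `use` imports the module *and* a type from it.
use crate::parse::token::{self, TokenKind};

fn main() {
    // Both spellings now work: via the module, or via the type directly.
    let a: TokenKind = token::TokenKind::Comma;
    println!("{:?} {:?}", a, TokenKind::Dot);
}
```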
@@ -229,8 +230,8 @@ impl<'a> Parser<'a> {
 
     pub fn expected_one_of_not_found(
         &mut self,
-        edible: &[token::TokenKind],
-        inedible: &[token::TokenKind],
+        edible: &[TokenKind],
+        inedible: &[TokenKind],
     ) -> PResult<'a, bool /* recovered */> {
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -368,7 +369,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    crate fn eat_to_tokens(&mut self, kets: &[&token::TokenKind]) {
+    crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
         let handler = self.diagnostic();
 
         if let Err(ref mut err) = self.parse_seq_to_before_tokens(
@@ -388,7 +389,7 @@ impl<'a> Parser<'a> {
     ///   let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
     ///                                                        ^^ help: remove extra angle brackets
     /// ```
-    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::TokenKind) {
+    crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) {
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -726,7 +727,7 @@ impl<'a> Parser<'a> {
     /// closing delimiter.
     pub fn unexpected_try_recover(
         &mut self,
-        t: &token::TokenKind,
+        t: &TokenKind,
     ) -> PResult<'a, bool /* recovered */> {
         let token_str = pprust::token_to_string(t);
         let this_token_str = self.this_token_descr();
@@ -903,7 +904,7 @@ impl<'a> Parser<'a> {
 
     crate fn recover_closing_delimiter(
         &mut self,
-        tokens: &[token::TokenKind],
+        tokens: &[TokenKind],
         mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, bool> {
         let mut pos = None;
@@ -272,7 +272,8 @@ impl<'a> Parser<'a> {
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`.
             recovered = self.look_ahead(1, |t| {
-                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind {
+                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix })
+                        = t.kind {
                     let next_span = self.look_ahead_span(1);
                     if self.span.hi() == next_span.lo() {
                         let s = String::from("0.") + &symbol.as_str();
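Reviewer note: the recovery being rewrapped here works like this — on seeing `.` followed immediately by an integer literal (the spans must touch, hence `self.span.hi() == next_span.lo()`), the parser rebuilds the intended float as the string `"0." + symbol`. A sketch of just that adjacency check and rebuild, with plain byte offsets standing in for real `Span`s:

```rust
// Hypothetical stand-in for a span: [lo, hi) byte offsets.
#[allow(dead_code)]
struct Span { lo: u32, hi: u32 }

// If `dot` and `int` are adjacent, recover `.4` as `"0.4"`.
fn recover_float(dot: &Span, int: &Span, symbol: &str) -> Option<String> {
    if dot.hi == int.lo {
        Some(String::from("0.") + symbol)
    } else {
        None // a gap means this wasn't meant as one float literal
    }
}

fn main() {
    let dot = Span { lo: 10, hi: 11 };
    let int = Span { lo: 11, hi: 12 };
    assert_eq!(recover_float(&dot, &int, "4"), Some("0.4".to_string()));
}
```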
@@ -5,7 +5,8 @@ use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use crate::source_map::{SourceMap, FilePathMapping};
 use crate::feature_gate::UnstableFeatures;
 use crate::parse::parser::Parser;
-use crate::syntax::parse::parser::emit_unclosed_delims;
+use crate::parse::parser::emit_unclosed_delims;
+use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
 use crate::diagnostics::plugin::ErrorMap;
 use crate::print::pprust::token_to_string;
@@ -358,13 +359,13 @@ pub fn stream_to_parser_with_base_dir<'a>(
 /// A sequence separator.
 pub struct SeqSep {
     /// The seperator token.
-    pub sep: Option<token::TokenKind>,
+    pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
 }
 
 impl SeqSep {
-    pub fn trailing_allowed(t: token::TokenKind) -> SeqSep {
+    pub fn trailing_allowed(t: TokenKind) -> SeqSep {
         SeqSep {
             sep: Some(t),
             trailing_sep_allowed: true,
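Reviewer note: with the bare `TokenKind` import, construction stays the same; only the type path in the signature shortens. A mock of the struct and its constructor as shown above (stand-in types, not the real libsyntax ones):

```rust
// Mock TokenKind; the real one lives in libsyntax's token module.
#[derive(PartialEq)]
enum TokenKind { Comma }

// A sequence separator (mirrors the struct in the hunk above).
struct SeqSep {
    sep: Option<TokenKind>,
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: TokenKind) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }
}

fn main() {
    // e.g. a comma-separated list where `1, 2, 3,` is accepted.
    let s = SeqSep::trailing_allowed(TokenKind::Comma);
    assert!(s.trailing_sep_allowed);
    assert!(s.sep == Some(TokenKind::Comma));
}
```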
@@ -426,7 +427,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name_macro_rules, false), ..
+                })),
                 Some(&TokenTree::Token(Token { kind: token::Not, .. })),
                 Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                 Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
@@ -446,7 +449,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if first_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,7 +461,9 @@ mod tests {
             (
                 2,
                 Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
-                Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })),
+                Some(&TokenTree::Token(Token {
+                    kind: token::Ident(name, false), ..
+                })),
             )
             if second_delim == token::Paren && name.as_str() == "a" => {},
             _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
