Move token-to-string functions into print::pprust
brendanzab committed Oct 28, 2014
1 parent cd04959 commit 665ad9c
Showing 5 changed files with 108 additions and 106 deletions.
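
At call sites inside libsyntax, the change is a mechanical rename: code that used `token::to_string(..)` from `parse::token` now imports `print::pprust` and calls `pprust::token_to_string(..)`. A minimal hedged sketch of the migration (the wrapper function and message text are illustrative; `Token`, `pprust::token_to_string`, and the format string come from the hunks below):

```rust
use parse::token::Token;
use print::pprust;   // new import required by this commit

// Illustrative helper, not part of the patch: builds the diagnostic text used
// in the macro_parser.rs hunk below.
fn unexpected_token_msg(tok: &Token) -> String {
    // before this commit: token::to_string(tok)
    format!("no rules expected the token `{}`", pprust::token_to_string(tok))
}
```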
5 changes: 3 additions & 2 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -87,6 +87,7 @@ use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
use ptr::P;

use std::rc::Rc;
@@ -402,7 +403,7 @@ pub fn parse(sess: &ParseSess,
nts, next_eis.len()).to_string());
} else if bb_eis.len() == 0u && next_eis.len() == 0u {
return Failure(sp, format!("no rules expected the token `{}`",
token::to_string(&tok)).to_string());
pprust::token_to_string(&tok)).to_string());
} else if next_eis.len() > 0u {
/* Now process the next token */
while next_eis.len() > 0u {
@@ -449,7 +450,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
"ident" => match p.token {
token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = token::to_string(&p.token);
let token_str = pprust::token_to_string(&p.token);
p.fatal((format!("expected ident, found {}",
token_str.as_slice())).as_slice())
}
4 changes: 2 additions & 2 deletions src/libsyntax/parse/lexer/comments.rs
@@ -15,7 +15,7 @@ use parse::lexer::{is_whitespace, Reader};
use parse::lexer::{StringReader, TokenAndSpan};
use parse::lexer::is_block_doc_comment;
use parse::lexer;
use parse::token;
use print::pprust;

use std::io;
use std::str;
@@ -373,7 +373,7 @@ pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler,
literals.push(Literal {lit: s.to_string(), pos: sp.lo});
})
} else {
debug!("tok: {}", token::to_string(&tok));
debug!("tok: {}", pprust::token_to_string(&tok));
}
first_read = false;
}
3 changes: 2 additions & 1 deletion src/libsyntax/parse/parser.rs
@@ -78,6 +78,7 @@ use parse::token::InternedString;
use parse::token::{keywords, special_idents};
use parse::token;
use parse::{new_sub_parser_from_file, ParseSess};
use print::pprust;
use ptr::P;
use owned_slice::OwnedSlice;

@@ -394,7 +395,7 @@ impl<'a> Parser<'a> {

/// Convert a token to a string using self's reader
pub fn token_to_string(token: &token::Token) -> String {
token::to_string(token)
pprust::token_to_string(token)
}

/// Convert the current token to a string using self's reader
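
Because `Parser::token_to_string` keeps its name and now simply forwards to `print::pprust`, the rest of the parser is untouched by this commit. A hedged sketch of a typical diagnostic call site (the function name, message text, and the exact signature of `fatal` are illustrative assumptions; `p.token`, `Parser::token_to_string`, and the `.as_slice()` pattern follow the hunks above):

```rust
use parse::parser::Parser;

// Illustrative only: report the current token in an error message.
fn report_unexpected<'a>(p: &mut Parser<'a>) -> ! {
    let found = Parser::token_to_string(&p.token);
    p.fatal(format!("unexpected token: `{}`", found).as_slice())
}
```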
95 changes: 0 additions & 95 deletions src/libsyntax/parse/token.rs
@@ -431,101 +431,6 @@ impl fmt::Show for Nonterminal {
}
}

pub fn binop_to_string(o: BinOpToken) -> &'static str {
match o {
Plus => "+",
Minus => "-",
Star => "*",
Slash => "/",
Percent => "%",
Caret => "^",
And => "&",
Or => "|",
Shl => "<<",
Shr => ">>",
}
}

pub fn to_string(t: &Token) -> String {
match *t {
Eq => "=".into_string(),
Lt => "<".into_string(),
Le => "<=".into_string(),
EqEq => "==".into_string(),
Ne => "!=".into_string(),
Ge => ">=".into_string(),
Gt => ">".into_string(),
Not => "!".into_string(),
Tilde => "~".into_string(),
OrOr => "||".into_string(),
AndAnd => "&&".into_string(),
BinOp(op) => binop_to_string(op).into_string(),
BinOpEq(op) => format!("{}=", binop_to_string(op)),

/* Structural symbols */
At => "@".into_string(),
Dot => ".".into_string(),
DotDot => "..".into_string(),
DotDotDot => "...".into_string(),
Comma => ",".into_string(),
Semi => ";".into_string(),
Colon => ":".into_string(),
ModSep => "::".into_string(),
RArrow => "->".into_string(),
LArrow => "<-".into_string(),
FatArrow => "=>".into_string(),
LParen => "(".into_string(),
RParen => ")".into_string(),
LBracket => "[".into_string(),
RBracket => "]".into_string(),
LBrace => "{".into_string(),
RBrace => "}".into_string(),
Pound => "#".into_string(),
Dollar => "$".into_string(),
Question => "?".into_string(),

/* Literals */
LitByte(b) => format!("b'{}'", b.as_str()),
LitChar(c) => format!("'{}'", c.as_str()),
LitFloat(c) => c.as_str().into_string(),
LitInteger(c) => c.as_str().into_string(),
LitStr(s) => format!("\"{}\"", s.as_str()),
LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n),
string=s.as_str()),
LitBinary(v) => format!("b\"{}\"", v.as_str()),
LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
delim="#".repeat(n),
string=s.as_str()),

/* Name components */
Ident(s, _) => get_ident(s).get().into_string(),
Lifetime(s) => format!("{}", get_ident(s)),
Underscore => "_".into_string(),

/* Other */
DocComment(s) => s.as_str().into_string(),
Eof => "<eof>".into_string(),
Whitespace => " ".into_string(),
Comment => "/* */".into_string(),
Shebang(s) => format!("/* shebang: {}*/", s.as_str()),

Interpolated(ref nt) => match *nt {
NtExpr(ref e) => ::print::pprust::expr_to_string(&**e),
NtMeta(ref e) => ::print::pprust::meta_item_to_string(&**e),
NtTy(ref e) => ::print::pprust::ty_to_string(&**e),
NtPath(ref e) => ::print::pprust::path_to_string(&**e),
NtItem(..) => "an interpolated item".into_string(),
NtBlock(..) => "an interpolated block".into_string(),
NtStmt(..) => "an interpolated statement".into_string(),
NtPat(..) => "an interpolated pattern".into_string(),
NtIdent(..) => "an interpolated identifier".into_string(),
NtTT(..) => "an interpolated tt".into_string(),
NtMatchers(..) => "an interpolated matcher sequence".into_string(),
}
}
}

// Get the first "argument"
macro_rules! first {
( $first:expr, $( $remainder:expr, )* ) => ( $first )
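
The two functions removed above reappear unchanged in `print/pprust.rs` below; one motivation visible in the diff is that the old `token::to_string` already reached into `::print::pprust` for interpolated nonterminals, so the formatting logic now lives next to the printers it depends on. A small hedged sketch of how the relocated helpers compose for compound-assignment tokens (`compound_assign_str` and `demo` are illustrative; the variants and signatures are taken from the added code below, assuming the 2014-era module-level enum variants such as `token::Shl`):

```rust
use parse::token::BinOpToken;
use parse::token;
use print::pprust;

// Mirrors the `BinOpEq` arm of the relocated `token_to_string`:
// a compound-assignment token renders as "<op>=".
fn compound_assign_str(op: BinOpToken) -> String {
    format!("{}=", pprust::binop_to_string(op))
}

fn demo() {
    assert_eq!(compound_assign_str(token::Shl).as_slice(), "<<=");
    assert_eq!(pprust::token_to_string(&token::BinOpEq(token::Shr)).as_slice(), ">>=");
}
```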
107 changes: 101 additions & 6 deletions src/libsyntax/print/pprust.rs
@@ -21,6 +21,7 @@ use attr::{AttrMetaMethods, AttributeMethods};
use codemap::{CodeMap, BytePos};
use codemap;
use diagnostic;
use parse::token::{BinOpToken, Token};
use parse::token;
use parse::lexer::comments;
use parse;
@@ -181,6 +182,101 @@ pub fn to_string(f: |&mut State| -> IoResult<()>) -> String {
}
}

pub fn binop_to_string(op: BinOpToken) -> &'static str {
match op {
token::Plus => "+",
token::Minus => "-",
token::Star => "*",
token::Slash => "/",
token::Percent => "%",
token::Caret => "^",
token::And => "&",
token::Or => "|",
token::Shl => "<<",
token::Shr => ">>",
}
}

pub fn token_to_string(tok: &Token) -> String {
match *tok {
token::Eq => "=".into_string(),
token::Lt => "<".into_string(),
token::Le => "<=".into_string(),
token::EqEq => "==".into_string(),
token::Ne => "!=".into_string(),
token::Ge => ">=".into_string(),
token::Gt => ">".into_string(),
token::Not => "!".into_string(),
token::Tilde => "~".into_string(),
token::OrOr => "||".into_string(),
token::AndAnd => "&&".into_string(),
token::BinOp(op) => binop_to_string(op).into_string(),
token::BinOpEq(op) => format!("{}=", binop_to_string(op)),

/* Structural symbols */
token::At => "@".into_string(),
token::Dot => ".".into_string(),
token::DotDot => "..".into_string(),
token::DotDotDot => "...".into_string(),
token::Comma => ",".into_string(),
token::Semi => ";".into_string(),
token::Colon => ":".into_string(),
token::ModSep => "::".into_string(),
token::RArrow => "->".into_string(),
token::LArrow => "<-".into_string(),
token::FatArrow => "=>".into_string(),
token::LParen => "(".into_string(),
token::RParen => ")".into_string(),
token::LBracket => "[".into_string(),
token::RBracket => "]".into_string(),
token::LBrace => "{".into_string(),
token::RBrace => "}".into_string(),
token::Pound => "#".into_string(),
token::Dollar => "$".into_string(),
token::Question => "?".into_string(),

/* Literals */
token::LitByte(b) => format!("b'{}'", b.as_str()),
token::LitChar(c) => format!("'{}'", c.as_str()),
token::LitFloat(c) => c.as_str().into_string(),
token::LitInteger(c) => c.as_str().into_string(),
token::LitStr(s) => format!("\"{}\"", s.as_str()),
token::LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n),
string=s.as_str()),
token::LitBinary(v) => format!("b\"{}\"", v.as_str()),
token::LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
delim="#".repeat(n),
string=s.as_str()),

/* Name components */
token::Ident(s, _) => token::get_ident(s).get().into_string(),
token::Lifetime(s) => format!("{}", token::get_ident(s)),
token::Underscore => "_".into_string(),

/* Other */
token::DocComment(s) => s.as_str().into_string(),
token::Eof => "<eof>".into_string(),
token::Whitespace => " ".into_string(),
token::Comment => "/* */".into_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s.as_str()),

token::Interpolated(ref nt) => match *nt {
token::NtExpr(ref e) => expr_to_string(&**e),
token::NtMeta(ref e) => meta_item_to_string(&**e),
token::NtTy(ref e) => ty_to_string(&**e),
token::NtPath(ref e) => path_to_string(&**e),
token::NtItem(..) => "an interpolated item".into_string(),
token::NtBlock(..) => "an interpolated block".into_string(),
token::NtStmt(..) => "an interpolated statement".into_string(),
token::NtPat(..) => "an interpolated pattern".into_string(),
token::NtIdent(..) => "an interpolated identifier".into_string(),
token::NtTT(..) => "an interpolated tt".into_string(),
token::NtMatchers(..) => "an interpolated matcher sequence".into_string(),
}
}
}

// FIXME (Issue #16472): the thing_to_string_impls macro should go away
// after we revise the syntax::ext::quote::ToToken impls to go directly
// to token-trees instead of thing -> string -> token-trees.
@@ -1026,14 +1122,14 @@ impl<'a> State<'a> {
match *tt {
ast::TtDelimited(_, ref delimed) => {
let (ref open, ref tts, ref close) = **delimed;
try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
try!(word(&mut self.s, token_to_string(&open.token).as_slice()));
try!(space(&mut self.s));
try!(self.print_tts(tts.as_slice()));
try!(space(&mut self.s));
word(&mut self.s, parse::token::to_string(&close.token).as_slice())
word(&mut self.s, token_to_string(&close.token).as_slice())
},
ast::TtToken(_, ref tk) => {
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
try!(word(&mut self.s, token_to_string(tk).as_slice()));
match *tk {
parse::token::DocComment(..) => {
hardbreak(&mut self.s)
@@ -1049,10 +1145,9 @@ impl<'a> State<'a> {
try!(word(&mut self.s, ")"));
match *separator {
Some(ref tk) => {
try!(word(&mut self.s,
parse::token::to_string(tk).as_slice()));
try!(word(&mut self.s, token_to_string(tk).as_slice()));
}
None => ()
None => {},
}
match kleene_op {
ast::ZeroOrMore => word(&mut self.s, "*"),

5 comments on commit 665ad9c

@bors (Contributor) commented on 665ad9c, Oct 29, 2014

saw approval from alexcrichton
at brendanzab@665ad9c

@bors (Contributor) commented on 665ad9c, Oct 29, 2014

merging bjz/rust/token = 665ad9c into auto

@bors (Contributor) commented on 665ad9c, Oct 29, 2014

bjz/rust/token = 665ad9c merged ok, testing candidate = 3bc5453

@bors (Contributor) commented on 665ad9c, Oct 29, 2014

@bors (Contributor) commented on 665ad9c, Oct 29, 2014

fast-forwarding master to auto = 3bc5453
