Skip to content

Commit

Permalink
Use an enum rather than a bool in token::Ident
Browse files Browse the repository at this point in the history
  • Loading branch information
brendanzab committed Oct 28, 2014
1 parent fcb78d6 commit cd04959
Show file tree
Hide file tree
Showing 6 changed files with 96 additions and 57 deletions.
5 changes: 3 additions & 2 deletions src/grammar/verify.rs
Expand Up @@ -35,7 +35,7 @@ use syntax::parse::lexer::TokenAndSpan;

fn parse_token_list(file: &str) -> HashMap<String, Token> {
fn id() -> Token {
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, false)
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
}

let mut res = HashMap::new();
Expand Down Expand Up @@ -198,7 +198,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
token::LitFloat(..) => token::LitFloat(nm),
token::LitBinary(..) => token::LitBinary(nm),
token::LitBinaryRaw(..) => token::LitBinaryRaw(fix(content), count(content)),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 }, true),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 },
token::ModName),
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }),
ref t => t.clone()
};
Expand Down
9 changes: 7 additions & 2 deletions src/libsyntax/ext/quote.rs
Expand Up @@ -531,6 +531,7 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
mk_token_path(cx, sp, name)
}

#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
match *tok {
token::BinOp(binop) => {
Expand Down Expand Up @@ -575,10 +576,14 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
vec!(mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n)));
}

token::Ident(ident, b) => {
token::Ident(ident, style) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec!(mk_ident(cx, sp, ident), cx.expr_bool(sp, b)));
vec![mk_ident(cx, sp, ident),
match style {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
}]);
}

token::Lifetime(ident) => {
Expand Down
61 changes: 36 additions & 25 deletions src/libsyntax/parse/lexer/mod.rs
Expand Up @@ -921,12 +921,14 @@ impl<'a> StringReader<'a> {
if string == "_" {
token::Underscore
} else {
let is_mod_name = self.curr_is(':') && self.nextch_is(':');

// FIXME: perform NFKC normalization here. (Issue #2253)
token::Ident(str_to_ident(string), is_mod_name)
if self.curr_is(':') && self.nextch_is(':') {
token::Ident(str_to_ident(string), token::ModName)
} else {
token::Ident(str_to_ident(string), token::Plain)
}
}
})
});
}

if is_dec_digit(c) {
Expand All @@ -937,8 +939,11 @@ impl<'a> StringReader<'a> {
match (c.unwrap(), self.nextch(), self.nextnextch()) {
('\x00', Some('n'), Some('a')) => {
let ast_ident = self.scan_embedded_hygienic_ident();
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
return token::Ident(ast_ident, is_mod_name);
return if self.curr_is(':') && self.nextch_is(':') {
token::Ident(ast_ident, token::ModName)
} else {
token::Ident(ast_ident, token::Plain)
};
}
_ => {}
}
Expand Down Expand Up @@ -1056,7 +1061,7 @@ impl<'a> StringReader<'a> {
str_to_ident(lifetime_name)
});
let keyword_checking_token =
&token::Ident(keyword_checking_ident, false);
&token::Ident(keyword_checking_ident, token::Plain);
let last_bpos = self.last_pos;
if keyword_checking_token.is_keyword(token::keywords::Self) {
self.err_span_(start,
Expand Down Expand Up @@ -1434,7 +1439,7 @@ mod test {
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
tok:token::Ident(id, false),
tok:token::Ident(id, token::Plain),
sp:Span {lo:BytePos(21),hi:BytePos(23),expn_id: NO_EXPANSION}};
assert_eq!(tok1,tok2);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
Expand All @@ -1443,7 +1448,7 @@ mod test {
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan{
tok:token::Ident(str_to_ident("main"), false),
tok:token::Ident(str_to_ident("main"), token::Plain),
sp:Span {lo:BytePos(24),hi:BytePos(28),expn_id: NO_EXPANSION}};
assert_eq!(tok3,tok4);
// the lparen is already read:
Expand All @@ -1458,39 +1463,45 @@ mod test {
}
}

// make the identifier by looking up the string in the interner
#[cfg(stage0)]
fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
token::Ident (str_to_ident(id),is_mod_name)
token::Ident(str_to_ident(id), is_mod_name)
}

// make the identifier by looking up the string in the interner
// Post-snapshot version of this test helper: tags the interned identifier
// with an explicit `token::IdentStyle` (`Plain` or `ModName`) instead of the
// old bool flag used by the `#[cfg(stage0)]` variant above.
#[cfg(not(stage0))]
fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
token::Ident(str_to_ident(id), style)
}

#[test] fn doublecolonparsing () {
check_tokenization(setup(&mk_sh(), "a b".to_string()),
vec!(mk_ident("a",false),
token::Whitespace,
mk_ident("b",false)));
vec![mk_ident("a", token::Plain),
token::Whitespace,
mk_ident("b", token::Plain)]);
}

#[test] fn dcparsing_2 () {
check_tokenization(setup(&mk_sh(), "a::b".to_string()),
vec!(mk_ident("a",true),
token::ModSep,
mk_ident("b",false)));
vec![mk_ident("a",token::ModName),
token::ModSep,
mk_ident("b", token::Plain)]);
}

#[test] fn dcparsing_3 () {
check_tokenization(setup(&mk_sh(), "a ::b".to_string()),
vec!(mk_ident("a",false),
token::Whitespace,
token::ModSep,
mk_ident("b",false)));
vec![mk_ident("a", token::Plain),
token::Whitespace,
token::ModSep,
mk_ident("b", token::Plain)]);
}

#[test] fn dcparsing_4 () {
check_tokenization(setup(&mk_sh(), "a:: b".to_string()),
vec!(mk_ident("a",true),
token::ModSep,
token::Whitespace,
mk_ident("b",false)));
vec![mk_ident("a",token::ModName),
token::ModSep,
token::Whitespace,
mk_ident("b", token::Plain)]);
}

#[test] fn character_a() {
Expand Down
18 changes: 9 additions & 9 deletions src/libsyntax/parse/mod.rs
Expand Up @@ -793,9 +793,9 @@ mod test {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TtToken(_, token::Ident(name_macro_rules, false)),
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
ast::TtToken(_, token::Not),
ast::TtToken(_, token::Ident(name_zip, false)),
ast::TtToken(_, token::Ident(name_zip, token::Plain)),
ast::TtDelimited(_, ref macro_delimed)]
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
Expand All @@ -810,7 +810,7 @@ mod test {
match (first_open, first_tts.as_slice(), first_close) {
(&ast::Delimiter { token: token::LParen, .. },
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, false))],
ast::TtToken(_, token::Ident(name, token::Plain))],
&ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 3: {}", **first_delimed),
Expand All @@ -819,7 +819,7 @@ mod test {
match (second_open, second_tts.as_slice(), second_close) {
(&ast::Delimiter { token: token::LParen, .. },
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, false))],
ast::TtToken(_, token::Ident(name, token::Plain))],
&ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 4: {}", **second_delimed),
Expand All @@ -845,7 +845,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"fn\",\
false\
\"Plain\"\
]\
}\
]\
Expand All @@ -858,7 +858,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"a\",\
false\
\"Plain\"\
]\
}\
]\
Expand All @@ -881,7 +881,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"Plain\"\
]\
}\
]\
Expand All @@ -901,7 +901,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"int\",\
false\
\"Plain\"\
]\
}\
]\
Expand Down Expand Up @@ -932,7 +932,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"Plain\"\
]\
}\
]\
Expand Down
12 changes: 6 additions & 6 deletions src/libsyntax/parse/parser.rs
Expand Up @@ -2067,10 +2067,10 @@ impl<'a> Parser<'a> {
},
// FIXME #13626: Should be able to stick in
// token::SELF_KEYWORD_NAME
token::Ident(id @ ast::Ident{
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
ctxt: _
} ,false) => {
token::Ident(id @ ast::Ident {
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
ctxt: _
}, token::Plain) => {
self.bump();
let path = ast_util::ident_to_path(mk_sp(lo, hi), id);
ex = ExprPath(path);
Expand Down Expand Up @@ -4094,14 +4094,14 @@ impl<'a> Parser<'a> {

fn is_self_ident(&mut self) -> bool {
match self.token {
token::Ident(id, false) => id.name == special_idents::self_.name,
token::Ident(id, token::Plain) => id.name == special_idents::self_.name,
_ => false
}
}

fn expect_self_ident(&mut self) -> ast::Ident {
match self.token {
token::Ident(id, false) if id.name == special_idents::self_.name => {
token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
self.bump();
id
},
Expand Down
48 changes: 35 additions & 13 deletions src/libsyntax/parse/token.rs
Expand Up @@ -98,6 +98,21 @@ pub enum BinOpToken {
Shr,
}

// NOTE(stage0): during the snapshot stage an identifier's style is still
// represented as a bool, so `ModName`/`Plain` are exposed here as bool
// constants to let call sites compile unchanged across both stages.
// Remove these after the next snapshot.
#[cfg(stage0)]
#[allow(non_uppercase_statics)]
pub const ModName: bool = true;
#[cfg(stage0)]
#[allow(non_uppercase_statics)]
pub const Plain: bool = false;

/// How an identifier token relates to a following `::`; replaces the old
/// "is_mod_name" bool carried by `token::Ident`.
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
#[cfg(not(stage0))]
pub enum IdentStyle {
/// `::` follows the identifier with no whitespace in-between.
ModName,
/// A plain identifier, not immediately followed by `::`.
Plain,
}

#[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
pub enum Token {
Expand Down Expand Up @@ -149,10 +164,10 @@ pub enum Token {
LitBinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */

/* Name components */
/// An identifier contains an "is_mod_name" boolean,
/// indicating whether :: follows this token with no
/// whitespace in between.
#[cfg(stage0)]
Ident(ast::Ident, bool),
#[cfg(not(stage0))]
Ident(ast::Ident, IdentStyle),
Underscore,
Lifetime(ast::Ident),

Expand Down Expand Up @@ -252,10 +267,11 @@ impl Token {

/// Returns `true` if the token is a path that is not followed by a `::`
/// token.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_plain_ident(&self) -> bool {
match *self {
Ident(_, false) => true,
_ => false,
Ident(_, Plain) => true,
_ => false,
}
}

Expand Down Expand Up @@ -299,18 +315,20 @@ impl Token {
}

/// Returns `true` if the token is a given keyword, `kw`.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
match *self {
Ident(sid, false) => kw.to_name() == sid.name,
_ => false,
Ident(sid, Plain) => kw.to_name() == sid.name,
_ => false,
}
}

/// Returns `true` if the token is either a special identifier, or a strict
/// or reserved keyword.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_any_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;

n == SELF_KEYWORD_NAME
Expand All @@ -324,9 +342,10 @@ impl Token {
}

/// Returns `true` if the token may not appear as an identifier.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_strict_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;

n == SELF_KEYWORD_NAME
Expand All @@ -335,7 +354,7 @@ impl Token {
|| STRICT_KEYWORD_START <= n
&& n <= STRICT_KEYWORD_FINAL
},
Ident(sid, true) => {
Ident(sid, ModName) => {
let n = sid.name;

n != SELF_KEYWORD_NAME
Expand All @@ -349,9 +368,10 @@ impl Token {

/// Returns `true` if the token is a keyword that has been reserved for
/// possible future use.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_reserved_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;

RESERVED_KEYWORD_START <= n
Expand Down Expand Up @@ -382,8 +402,10 @@ pub enum Nonterminal {
NtPat( P<ast::Pat>),
NtExpr( P<ast::Expr>),
NtTy( P<ast::Ty>),
/// See IDENT, above, for meaning of bool in NtIdent:
#[cfg(stage0)]
NtIdent(Box<ast::Ident>, bool),
#[cfg(not(stage0))]
NtIdent(Box<ast::Ident>, IdentStyle),
/// Stuff inside brackets for attributes
NtMeta( P<ast::MetaItem>),
NtPath(Box<ast::Path>),
Expand Down Expand Up @@ -857,6 +879,6 @@ mod test {
assert!(Gt.mtwt_eq(&Gt));
let a = str_to_ident("bac");
let a1 = mark_ident(a,92);
assert!(Ident(a,true).mtwt_eq(&Ident(a1,false)));
assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
}
}

0 comments on commit cd04959

Please sign in to comment.