syntax: Switch function parameter order in `TokenTree::token`

petrochenkov committed Jun 5, 2019
1 parent 350a34f commit 67ce3f458939e6fe073bca6128526cb23f0797ba
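
The whole commit is a mechanical swap of the two parameters of `TokenTree::token` (and the analogous `quoted::TokenTree::token`) from `(span, kind)` to `(kind, span)`, which lines up with the `Token::new(kind, span)` call in the constructor's body. Below is a minimal sketch of the old and new shape, using simplified stand-in types rather than the real libsyntax `TokenKind`/`Span`/`Token`:

```rust
// Simplified stand-ins for the libsyntax types; only the parameter
// order of the constructor is the point of this sketch.
#[derive(Debug, Clone, Copy)]
struct Span { lo: u32, hi: u32 }

#[derive(Debug)]
enum TokenKind { Comma }

#[derive(Debug)]
struct Token { kind: TokenKind, span: Span }

impl Token {
    fn new(kind: TokenKind, span: Span) -> Token {
        Token { kind, span }
    }
}

#[derive(Debug)]
enum TokenTree { Token(Token) }

impl TokenTree {
    // Before: fn token(span: Span, kind: TokenKind) -> TokenTree
    // After this commit: the kind comes first, matching Token::new.
    fn token(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span))
    }
}

fn main() {
    let sp = Span { lo: 0, hi: 1 };
    // Call sites change from `TokenTree::token(sp, token::Comma)` to:
    let tt = TokenTree::token(TokenKind::Comma, sp);
    println!("{:?}", tt);
}
```

Every hunk below is the same call-site update repeated across the crate; only the constructor definitions (in `quoted::TokenTree` and `tokenstream::TokenTree`) change signature.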
@@ -465,10 +465,10 @@ impl MetaItem {
let mod_sep_span = Span::new(last_pos,
segment.ident.span.lo(),
segment.ident.span.ctxt());
idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
}
idents.push(TokenTree::token(segment.ident.span,
TokenKind::from_ast_ident(segment.ident)).into());
idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
segment.ident.span).into());
last_pos = segment.ident.span.hi();
}
self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -532,15 +532,15 @@ impl MetaItemKind {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::NameValue(ref lit) => {
let mut vec = vec![TokenTree::token(span, token::Eq).into()];
let mut vec = vec![TokenTree::token(token::Eq, span).into()];
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
TokenStream::new(vec)
}
MetaItemKind::List(ref list) => {
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tokens.push(TokenTree::token(span, token::Comma).into());
tokens.push(TokenTree::token(token::Comma, span).into());
}
item.tokens().append_to_tree_and_joint_vec(&mut tokens);
}
@@ -268,8 +268,9 @@ impl<F> TTMacroExpander for F
if let tokenstream::TokenTree::Token(token) = tt {
if let token::Interpolated(nt) = &token.kind {
if let token::NtIdent(ident, is_raw) = **nt {
*tt = tokenstream::TokenTree::token(ident.span,
token::Ident(ident.name, is_raw));
*tt = tokenstream::TokenTree::token(
token::Ident(ident.name, is_raw), ident.span
);
}
}
}
@@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
AttrProcMacro(ref mac, ..) => {
self.gate_proc_macro_attr_item(attr.span, &item);
let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
Annotatable::Expr(expr) => token::NtExpr(expr),
}))).into();
})), DUMMY_SP).into();
let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
@@ -270,7 +270,7 @@ pub fn compile(
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
],
separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
})),
// to phase into semicolon-termination instead of semicolon-separation
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
separator: None,
op: quoted::KleeneOp::ZeroOrMore,
num_captures: 0
@@ -613,7 +613,7 @@ impl FirstSets {

if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
}

// Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ impl FirstSets {

if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
}

assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
let mut new;
let my_suffix = if let Some(ref u) = seq_rep.separator {
new = suffix_first.clone();
new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
&new
} else {
&suffix_first
@@ -40,7 +40,7 @@ impl Delimited {
} else {
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
};
TokenTree::token(open_span, self.open_token())
TokenTree::token(self.open_token(), open_span)
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +50,7 @@ impl Delimited {
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
TokenTree::token(close_span, self.close_token())
TokenTree::token(self.close_token(), close_span)
}
}

@@ -153,7 +153,7 @@ impl TokenTree {
}
}

crate fn token(span: Span, kind: TokenKind) -> TokenTree {
crate fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
}
}
@@ -325,7 +325,7 @@ where
let (ident, is_raw) = token.ident().unwrap();
let span = ident.span.with_lo(span.lo());
if ident.name == kw::Crate && !is_raw {
TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
} else {
TokenTree::MetaVar(span, ident)
}
@@ -342,7 +342,7 @@ where
}

// There are no more tokens. Just return the `$` we already have.
None => TokenTree::token(span, token::Dollar),
None => TokenTree::token(token::Dollar, span),
},

// `tree` is an arbitrary token. Keep it.
@@ -119,7 +119,7 @@ pub fn transcribe(
Some((tt, _)) => tt.span(),
None => DUMMY_SP,
};
result.push(TokenTree::token(prev_span, sep).into());
result.push(TokenTree::token(sep, prev_span).into());
}
continue;
}
@@ -225,7 +225,7 @@ pub fn transcribe(
result.push(tt.clone().into());
} else {
sp = sp.apply_mark(cx.current_expansion.mark);
let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
result.push(token.into());
}
} else {
@@ -241,8 +241,8 @@ pub fn transcribe(
let ident =
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
sp = sp.apply_mark(cx.current_expansion.mark);
result.push(TokenTree::token(sp, token::Dollar).into());
result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
result.push(TokenTree::token(token::Dollar, sp).into());
result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
}
}

@@ -157,7 +157,7 @@ impl<'a> Parser<'a> {
self.check(&token::OpenDelim(DelimToken::Brace)) {
self.parse_token_tree().into()
} else if self.eat(&token::Eq) {
let eq = TokenTree::token(self.prev_span, token::Eq);
let eq = TokenTree::token(token::Eq, self.prev_span);
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtExpr(..) = **nt {
@@ -261,7 +261,7 @@ impl Lit {
token::Bool => token::Ident(self.token.symbol, false),
_ => token::Literal(self.token),
};
TokenTree::token(self.span, token).into()
TokenTree::token(token, self.span).into()
}
}

@@ -476,23 +476,23 @@ mod tests {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

let expected = TokenStream::new(vec![
TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
token::DelimToken::Paren,
TokenStream::new(vec![
TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
TokenTree::token(sp(8, 9), token::Colon).into(),
TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
TokenTree::token(token::Colon, sp(8, 9)).into(),
TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
]).into(),
).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
token::DelimToken::Brace,
TokenStream::new(vec![
TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
TokenTree::token(sp(18, 19), token::Semi).into(),
TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
TokenTree::token(token::Semi, sp(18, 19)).into(),
]).into(),
).into()
]);
@@ -362,11 +362,11 @@ impl TokenCursor {
delim_span,
token::Bracket,
[
TokenTree::token(sp, token::Ident(sym::doc, false)),
TokenTree::token(sp, token::Eq),
TokenTree::token(sp, token::TokenKind::lit(
TokenTree::token(token::Ident(sym::doc, false), sp),
TokenTree::token(token::Eq, sp),
TokenTree::token(token::TokenKind::lit(
token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
)),
), sp),
]
.iter().cloned().collect::<TokenStream>().into(),
);
@@ -375,10 +375,10 @@ impl TokenCursor {
delim_span,
token::NoDelim,
&if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
[TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
.iter().cloned().collect::<TokenStream>().into()
} else {
[TokenTree::token(sp, token::Pound), body]
[TokenTree::token(token::Pound, sp), body]
.iter().cloned().collect::<TokenStream>().into()
},
)));
@@ -4344,7 +4344,7 @@ impl<'a> Parser<'a> {
};
TokenStream::new(vec![
args.into(),
TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
body.into(),
])
} else {
@@ -763,10 +763,10 @@ impl Nonterminal {
prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
}
Nonterminal::NtIdent(ident, is_raw) => {
Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
}
Nonterminal::NtLifetime(ident) => {
Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
}
Nonterminal::NtTT(ref tt) => {
Some(tt.clone().into())
@@ -852,7 +852,7 @@ fn prepend_attrs(sess: &ParseSess,
if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
let ident = attr.path.segments[0].ident;
let token = Ident(ident.name, ident.as_str().starts_with("r#"));
brackets.push(tokenstream::TokenTree::token(ident.span, token));
brackets.push(tokenstream::TokenTree::token(token, ident.span));

// ... and for more complicated paths, fall back to a reparse hack that
// should eventually be removed.
@@ -866,7 +866,7 @@ fn prepend_attrs(sess: &ParseSess,
// The span we list here for `#` and for `[ ... ]` are both wrong in
// that it encompasses more than each token, but it hopefully is "good
// enough" for now at least.
builder.push(tokenstream::TokenTree::token(attr.span, Pound));
builder.push(tokenstream::TokenTree::token(Pound, attr.span));
let delim_span = DelimSpan::from_single(attr.span);
builder.push(tokenstream::TokenTree::Delimited(
delim_span, DelimToken::Bracket, brackets.build().into()));
@@ -138,7 +138,7 @@ impl TokenTree {
TokenStream::new(vec![(self, Joint)])
}

pub fn token(span: Span, kind: TokenKind) -> TokenTree {
pub fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
}

@@ -149,7 +149,7 @@ impl TokenTree {
} else {
span.with_hi(span.lo() + BytePos(delim.len() as u32))
};
TokenTree::token(open_span, token::OpenDelim(delim))
TokenTree::token(token::OpenDelim(delim), open_span)
}

/// Returns the closing delimiter as a token tree.
@@ -159,7 +159,7 @@ impl TokenTree {
} else {
span.with_lo(span.hi() - BytePos(delim.len() as u32))
};
TokenTree::token(close_span, token::CloseDelim(delim))
TokenTree::token(token::CloseDelim(delim), close_span)
}
}
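
The two delimiter helpers in the hunk above carve the opening and closing token spans out of the group's overall span. A toy illustration of that arithmetic, with plain integers standing in for `Span`/`BytePos` (an assumption made only for this sketch), reproduces the `sp(5, 6)`/`sp(13, 14)` pair seen in the parser test hunk earlier:

```rust
// Plain integers stand in for Span/BytePos; this only illustrates the
// with_hi/with_lo arithmetic used by the delimiter helpers above.
#[derive(Debug, Clone, Copy)]
struct Span { lo: u32, hi: u32 }

impl Span {
    fn with_hi(self, hi: u32) -> Span { Span { hi, ..self } }
    fn with_lo(self, lo: u32) -> Span { Span { lo, ..self } }
}

fn main() {
    // A parenthesized group covering bytes 5..14, with 1-byte delimiters:
    let span = Span { lo: 5, hi: 14 };
    let delim_len = 1;

    let open_span = span.with_hi(span.lo + delim_len);   // Span { lo: 5, hi: 6 }
    let close_span = span.with_lo(span.hi - delim_len);  // Span { lo: 13, hi: 14 }

    println!("open: {:?}, close: {:?}", open_span, close_span);
}
```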

@@ -212,7 +212,7 @@ impl TokenStream {
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = (TokenTree::token(sp, token::Comma), NonJoint);
let comma = (TokenTree::token(token::Comma, sp), NonJoint);
suggestion = Some((pos, comma, sp));
}
}
@@ -433,7 +433,7 @@ impl TokenStreamBuilder {
let last_stream = self.0.pop().unwrap();
self.push_all_but_last_tree(&last_stream);
let glued_span = last_token.span.to(token.span);
let glued_tt = TokenTree::token(glued_span, glued_tok);
let glued_tt = TokenTree::token(glued_tok, glued_span);
let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
self.0.push(glued_tokenstream);
self.push_all_but_first_tree(&stream);
@@ -660,7 +660,7 @@ mod tests {
with_default_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
let test2 = string_to_ts("foo(bar::baz)");

assert_eq!(test0.is_empty(), true);
@@ -673,9 +673,9 @@ mod tests {
fn test_dotdotdot() {
with_default_globals(|| {
let mut builder = TokenStreamBuilder::new();
builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
builder.push(TokenTree::token(sp(2, 3), token::Dot));
builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
builder.push(TokenTree::token(token::Dot, sp(2, 3)));
let stream = builder.build();
assert!(stream.eq_unspanned(&string_to_ts("...")));
assert_eq!(stream.trees().count(), 1);
@@ -30,11 +30,11 @@ pub fn expand_assert<'cx>(
path: Path::from_ident(Ident::new(sym::panic, sp)),
tts: custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::token(
DUMMY_SP,
TokenKind::lit(token::Str, Symbol::intern(&format!(
"assertion failed: {}",
pprust::expr_to_string(&cond_expr).escape_debug()
)), None),
DUMMY_SP,
))
}).into(),
delim: MacDelimiter::Parenthesis,
@@ -69,7 +69,7 @@ impl MultiItemModifier for ProcMacroDerive {
MarkAttrs(&self.attrs).visit_item(&item);

let token = token::Interpolated(Lrc::new(token::NtItem(item)));
let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();

let server = proc_macro_server::Rustc::new(ecx);
let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
