Reduce the size of the TokenTree
brendanzab committed Oct 25, 2014
1 parent dfb4163 commit 34dacb8
Showing 7 changed files with 34 additions and 22 deletions.
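
The substance of the change is the `TtDelimited` variant in `src/libsyntax/ast.rs`, shown in the first hunk below: instead of carrying two `Delimiter`s inline plus an `Rc<Vec<TokenTree>>`, the variant now carries a single `Rc` that owns the open delimiter, the body, and the close delimiter as one tuple. Since an enum is as large as its largest variant, this shrinks every `TokenTree` to roughly a `Span` plus one pointer. The following is a minimal, self-contained sketch in modern Rust; `Span`, `Token`, and `Delimiter` are simplified stand-ins rather than the real libsyntax definitions, so the byte counts it prints are only indicative of the effect, not the actual 2014 numbers.

#![allow(dead_code)]
use std::mem::size_of;
use std::rc::Rc;

// Simplified stand-ins for the libsyntax types; layouts are illustrative only.
#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32, expn: u32 }

#[derive(Clone)]
struct Token(u64);

#[derive(Clone)]
struct Delimiter { span: Span, token: Token }

// Before this commit: both delimiters live inline in the variant.
enum OldTokenTree {
    TtToken(Span, Token),
    TtDelimited(Span, Delimiter, Rc<Vec<OldTokenTree>>, Delimiter),
}

// After this commit: a single Rc owns the delimiters together with the body.
enum NewTokenTree {
    TtToken(Span, Token),
    TtDelimited(Span, Rc<(Delimiter, Vec<NewTokenTree>, Delimiter)>),
}

fn main() {
    // An enum is as large as its largest variant, so moving the two
    // Delimiters behind the Rc shrinks the whole TokenTree type.
    println!("old TokenTree: {} bytes", size_of::<OldTokenTree>());
    println!("new TokenTree: {} bytes", size_of::<NewTokenTree>());
}
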
5 changes: 2 additions & 3 deletions src/libsyntax/ast.rs
@@ -629,8 +629,7 @@ pub enum TokenTree {
     /// A single token
     TtToken(Span, ::parse::token::Token),
     /// A delimited sequence of token trees
-    // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TtDelimited(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
+    TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),

     // These only make sense for right-hand-sides of MBE macros:

@@ -649,7 +648,7 @@ impl TokenTree {
     pub fn get_span(&self) -> Span {
         match *self {
             TtToken(span, _) => span,
-            TtDelimited(span, _, _, _) => span,
+            TtDelimited(span, _) => span,
             TtSequence(span, _, _, _) => span,
             TtNonterminal(span, _) => span,
         }
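
Every consumer updated in the remaining files follows the same access pattern: match the new two-field variant, dereference the `Rc` once, and destructure the `(open, tts, close)` tuple by reference. Below is a minimal sketch of that pattern in modern Rust, using hypothetical stand-in types (a `u32` playing the role of spans and tokens, and a `flatten` helper that loosely mirrors how transcribe.rs rebuilds open delimiter, body, close delimiter); it is not the libsyntax code itself.

use std::rc::Rc;

// Hypothetical stand-ins: a u32 plays the role of Span/Token.
struct Delimiter(u32);

enum TokenTree {
    TtToken(u32, u32),
    TtDelimited(u32, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
}

// Flatten a tree into a token list: open delimiter, then the body,
// then the close delimiter.
fn flatten(tt: &TokenTree, out: &mut Vec<u32>) {
    match *tt {
        TokenTree::TtToken(_, tok) => out.push(tok),
        TokenTree::TtDelimited(_, ref delimed) => {
            // The access pattern this commit introduces everywhere:
            // dereference the Rc once, then destructure the tuple by reference.
            let (ref open, ref tts, ref close) = **delimed;
            out.push(open.0);
            for inner in tts.iter() {
                flatten(inner, out);
            }
            out.push(close.0);
        }
    }
}

fn main() {
    let tree = TokenTree::TtDelimited(
        0,
        Rc::new((Delimiter(40), vec![TokenTree::TtToken(0, 7)], Delimiter(41))),
    );
    let mut out = Vec::new();
    flatten(&tree, &mut out);
    println!("{:?}", out); // prints [40, 7, 41]
}
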
3 changes: 2 additions & 1 deletion src/libsyntax/ext/quote.rs
@@ -651,7 +651,8 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                               vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
         },
-        ast::TtDelimited(sp, ref open, ref tts, ref close) => {
+        ast::TtDelimited(sp, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
             mk_tt(cx, sp, &open.to_tt()).into_iter()
                 .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
                 .chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
5 changes: 4 additions & 1 deletion src/libsyntax/ext/tt/macro_rules.rs
@@ -172,7 +172,10 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     MatchedNonterminal(NtTT(ref tt)) => {
                         match **tt {
                             // ignore delimiters
-                            TtDelimited(_, _, ref tts, _) => (**tts).clone(),
+                            TtDelimited(_, ref delimed) => {
+                                let (_, ref tts, _) = **delimed;
+                                tts.clone()
+                            },
                             _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                         }
                     },
13 changes: 9 additions & 4 deletions src/libsyntax/ext/tt/transcribe.rs
@@ -128,9 +128,13 @@ impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {

 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
-        // The opening and closing delimiters are both tokens, so they are
-        // treated as `LisUnconstrained`.
-        TtDelimited(_, _, ref tts, _) | TtSequence(_, ref tts, _, _) => {
+        TtDelimited(_, ref delimed) => {
+            let (_, ref tts, _) = **delimed;
+            tts.iter().fold(LisUnconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, r)
+            })
+        },
+        TtSequence(_, ref tts, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })

@@ -202,7 +206,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 (*frame.forest)[frame.idx].clone()
             };
             match t {
-                TtDelimited(_, open, tts, close) => {
+                TtDelimited(_, ref delimed) => {
+                    let (ref open, ref tts, ref close) = **delimed;
                     let mut forest = Vec::with_capacity(1 + tts.len() + 1);
                     forest.push(open.to_tt());
                     forest.extend(tts.iter().map(|x| (*x).clone()));
25 changes: 14 additions & 11 deletions src/libsyntax/fold.rs
@@ -571,17 +571,20 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
         TtToken(span, ref tok) =>
             TtToken(span, fld.fold_token(tok.clone())),
-        TtDelimited(span, ref open, ref tts, ref close) =>
-            TtDelimited(span,
-                        Delimiter {
-                            span: open.span,
-                            token: fld.fold_token(open.token.clone())
-                        },
-                        Rc::new(fld.fold_tts(tts.as_slice())),
-                        Delimiter {
-                            span: close.span,
-                            token: fld.fold_token(close.token.clone())
-                        }),
+        TtDelimited(span, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
+            TtDelimited(span, Rc::new((
+                Delimiter {
+                    span: open.span,
+                    token: fld.fold_token(open.token.clone())
+                },
+                fld.fold_tts(tts.as_slice()),
+                Delimiter {
+                    span: close.span,
+                    token: fld.fold_token(close.token.clone())
+                },
+            )))
+        },
         TtSequence(span, ref pattern, ref sep, is_optional) =>
             TtSequence(span,
                        Rc::new(fld.fold_tts(pattern.as_slice())),
2 changes: 1 addition & 1 deletion src/libsyntax/parse/parser.rs
@@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> {
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: self.span.hi, ..pre_span };

-                TtDelimited(span, open, Rc::new(tts), close)
+                TtDelimited(span, Rc::new((open, tts, close)))
             }
             _ => parse_non_delim_tt_tok(self)
         }
3 changes: 2 additions & 1 deletion src/libsyntax/print/pprust.rs
@@ -1020,7 +1020,8 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TtDelimited(_, ref open, ref tts, ref close) => {
+            ast::TtDelimited(_, ref delimed) => {
+                let (ref open, ref tts, ref close) = **delimed;
                 try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
                 try!(space(&mut self.s));
                 try!(self.print_tts(tts.as_slice()));
