
Auto merge of #57004 - nnethercote:TS-change-Stream, r=<try>
Make `TokenStream` less recursive.

`TokenStream` is currently recursive in *two* ways:

- the `TokenTree` variant contains a `ThinTokenStream`, which can
  contain a `TokenStream`;

- the `TokenStream` variant contains a `Vec<TokenStream>`.

The latter is not necessary and causes significant complexity. This
commit replaces it with the simpler `Vec<(TokenTree, IsJoint)>`.
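
As a rough sketch of the data-structure change (names and details below are
approximations inferred from the commit message and the diffs, not the patch
verbatim):

```rust
// Stand-ins for the real libsyntax types, simplified so the sketch compiles.
#[derive(Clone)]
pub struct TokenTree; // really Token(Span, Token) | Delimited(...)
#[derive(Clone, Copy)]
pub enum IsJoint { Joint, NonJoint }

// Before: recursive in two ways -- a tree can contain a stream (via
// `ThinTokenStream`, elided here), and a stream can contain sub-streams.
pub enum OldTokenStream {
    Empty,
    Tree(TokenTree, IsJoint),
    Stream(Vec<OldTokenStream>),
}

// After: the nested `Vec<TokenStream>` is gone; a stream is just a flat
// sequence of (tree, joint) pairs.
pub type TreeAndJoint = (TokenTree, IsJoint);

pub enum NewTokenStream {
    Empty,
    Stream(Vec<TreeAndJoint>), // the real code may keep this behind an Lrc
}
```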

Removing it simplifies the code considerably. In particular,
`StreamCursor` is eliminated, and `Cursor` becomes much simpler,
consisting now of just a `TokenStream` and an index.
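
Continuing the sketch above, the simplified cursor could be little more than
the stream's flat pair list plus an index (again an approximation, not the
patch's exact code):

```rust
// A cursor over a flattened stream: just the pairs and a position.
pub struct Cursor {
    pairs: Vec<TreeAndJoint>,
    index: usize,
}

impl Cursor {
    // Return the next (tree, joint) pair, if any, and advance.
    pub fn next(&mut self) -> Option<&TreeAndJoint> {
        if self.index == self.pairs.len() {
            return None;
        }
        self.index += 1;
        self.pairs.get(self.index - 1)
    }
}
```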

The commit also removes the `Extend` impl for `TokenStream`, because it
is only used in tests; those tests are removed as well.

Overall, the commit reduces the number of lines of code by almost 200.
bors committed Jan 8, 2019
2 parents 7ad470c + e80a930 commit 7346647
Showing 7 changed files with 148 additions and 341 deletions.
8 changes: 5 additions & 3 deletions src/libsyntax/attr/mod.rs
@@ -472,7 +472,7 @@ impl MetaItem {
Token::from_ast_ident(segment.ident)).into());
last_pos = segment.ident.span.hi();
}
- idents.push(self.node.tokens(self.span));
+ self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
TokenStream::new(idents)
}

@@ -529,15 +529,17 @@ impl MetaItemKind {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::NameValue(ref lit) => {
- TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+ let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+ lit.tokens().append_to_tree_and_joint_vec(&mut vec);
+ TokenStream::new(vec)
}
MetaItemKind::List(ref list) => {
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
tokens.push(TokenTree::Token(span, Token::Comma).into());
}
- tokens.push(item.node.tokens());
+ item.node.tokens().append_to_tree_and_joint_vec(&mut tokens);
}
TokenTree::Delimited(
DelimSpan::from_single(span),
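
The pattern repeated throughout these hunks — `vec.push(stream)` becoming
`stream.append_to_tree_and_joint_vec(&mut vec)` — suggests a helper roughly
like the following (a guess at its shape based only on the call sites shown;
the real definition is in the `tokenstream` module, whose diff is not shown
here). It reuses the `NewTokenStream` sketch from the commit message above:

```rust
impl NewTokenStream {
    // Splice this stream's (TokenTree, IsJoint) pairs onto the end of an
    // existing vector, rather than pushing the whole stream as one element.
    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
        if let NewTokenStream::Stream(pairs) = self {
            vec.extend(pairs);
        }
    }
}
```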
2 changes: 1 addition & 1 deletion src/libsyntax/ext/quote.rs
@@ -233,7 +233,7 @@ pub mod rt {
self.span, token::Token::from_ast_ident(segment.ident)
).into());
}
- inner.push(self.tokens.clone());
+ self.tokens.clone().append_to_tree_and_joint_vec(&mut inner);

let delim_span = DelimSpan::from_single(self.span);
r.push(TokenTree::Delimited(
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/transcribe.rs
@@ -7,7 +7,7 @@ use fold::noop_fold_tt;
use parse::token::{self, Token, NtTT};
use smallvec::SmallVec;
use syntax_pos::DUMMY_SP;
- use tokenstream::{TokenStream, TokenTree, DelimSpan};
+ use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -63,7 +63,7 @@ pub fn transcribe(cx: &ExtCtxt,
let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
let mut repeats = Vec::new();
- let mut result: Vec<TokenStream> = Vec::new();
+ let mut result: Vec<TreeAndJoint> = Vec::new();
let mut result_stack = Vec::new();

loop {
@@ -78,7 +78,7 @@
if let Some(sep) = sep.clone() {
// repeat same span, I guess
let prev_span = match result.last() {
- Some(stream) => stream.trees().next().unwrap().span(),
+ Some((tt, _)) => tt.span(),
None => DUMMY_SP,
};
result.push(TokenTree::Token(prev_span, sep).into());
8 changes: 4 additions & 4 deletions src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
use print::pprust::token_to_string;
use parse::lexer::StringReader;
use parse::{token, PResult};
- use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
+ use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};

impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ impl<'a> StringReader<'a> {
}
}

- fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
+ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
let sm = self.sess.source_map();
match self.token {
token::Eof => {
@@ -156,7 +156,7 @@ impl<'a> StringReader<'a> {
Ok(TokenTree::Delimited(
delim_span,
delim,
- tts.into(),
+ tts.into()
).into())
},
token::CloseDelim(_) => {
@@ -176,7 +176,7 @@
let raw = self.span_src_raw;
self.real_token();
let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
- Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
+ Ok((tt, if is_joint { Joint } else { NonJoint }))
}
}
}
2 changes: 1 addition & 1 deletion src/libsyntax/parse/parser.rs
@@ -2914,7 +2914,7 @@ impl<'a> Parser<'a> {
TokenTree::Delimited(
frame.span,
frame.delim,
- frame.tree_cursor.original_stream().into(),
+ frame.tree_cursor.stream.into(),
)
},
token::CloseDelim(_) | token::Eof => unreachable!(),
