diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index a309775a1a40b..d03563f8891aa 100644
--- a/src/libsyntax/attr/mod.rs
+++ b/src/libsyntax/attr/mod.rs
@@ -472,7 +472,7 @@ impl MetaItem {
                                          Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        idents.push(self.node.tokens(self.span));
+        self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
         TokenStream::new(idents)
     }
 
@@ -529,7 +529,9 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                lit.tokens().append_to_tree_and_joint_vec(&mut vec);
+                TokenStream::new(vec)
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -537,7 +539,7 @@ impl MetaItemKind {
                     if i > 0 {
                         tokens.push(TokenTree::Token(span, Token::Comma).into());
                     }
-                    tokens.push(item.node.tokens());
+                    item.node.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
                 TokenTree::Delimited(
                     DelimSpan::from_single(span),
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index ad8668aca70f9..c3124144009ab 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -233,7 +233,7 @@ pub mod rt {
                         self.span, token::Token::from_ast_ident(segment.ident)
                     ).into());
                 }
-                inner.push(self.tokens.clone());
+                self.tokens.clone().append_to_tree_and_joint_vec(&mut inner);
 
                 let delim_span = DelimSpan::from_single(self.span);
                 r.push(TokenTree::Delimited(
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 31d87508c6bca..0ef2d3b749d81 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -7,7 +7,7 @@ use fold::noop_fold_tt;
 use parse::token::{self, Token, NtTT};
 use smallvec::SmallVec;
 use syntax_pos::DUMMY_SP;
-use tokenstream::{TokenStream, TokenTree, DelimSpan};
+use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -63,7 +63,7 @@ pub fn transcribe(cx: &ExtCtxt,
     let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
     let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result: Vec<TokenStream> = Vec::new();
+    let mut result: Vec<TreeAndJoint> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {
@@ -78,7 +78,7 @@ pub fn transcribe(cx: &ExtCtxt,
                 if let Some(sep) = sep.clone() {
                     // repeat same span, I guess
                     let prev_span = match result.last() {
-                        Some(stream) => stream.trees().next().unwrap().span(),
+                        Some((tt, _)) => tt.span(),
                         None => DUMMY_SP,
                     };
                     result.push(TokenTree::Token(prev_span, sep).into());
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 6c4e9e1c940c5..d219f29f06c20 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.sess.source_map();
         match self.token {
             token::Eof => {
@@ -156,7 +156,7 @@ impl<'a> StringReader<'a> {
                 Ok(TokenTree::Delimited(
                     delim_span,
                     delim,
-                    tts.into(),
+                    tts.into()
                 ).into())
             },
             token::CloseDelim(_) => {
@@ -176,7 +176,7 @@ impl<'a> StringReader<'a> {
             let raw = self.span_src_raw;
             self.real_token();
             let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
-            Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
+            Ok((tt, if is_joint { Joint } else { NonJoint }))
         }
     }
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 1e4a26b353759..eababf58dfa4c 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -2914,7 +2914,7 @@ impl<'a> Parser<'a> {
                 TokenTree::Delimited(
                     frame.span,
                     frame.delim,
-                    frame.tree_cursor.original_stream().into(),
+                    frame.tree_cursor.stream.into(),
                 )
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 4b33a715c5c89..fb72ef9c956ce 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -147,9 +147,11 @@ impl TokenTree {
 pub enum TokenStream {
     Empty,
     Tree(TokenTree, IsJoint),
-    Stream(Lrc<Vec<TokenStream>>),
+    Stream(Lrc<Vec<TreeAndJoint>>),
 }
 
+pub type TreeAndJoint = (TokenTree, IsJoint);
+
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
 static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
@@ -173,16 +175,14 @@ impl TokenStream {
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
-                        (TokenStream::Tree(TokenTree::Token(_, token::Token::Comma), NonJoint), _) |
-                        (_, TokenStream::Tree(TokenTree::Token(_, token::Token::Comma), NonJoint))
-                        => continue,
-                        (TokenStream::Tree(TokenTree::Token(sp, _), NonJoint), _) => *sp,
-                        (TokenStream::Tree(TokenTree::Delimited(sp, ..), NonJoint), _) =>
-                            sp.entire(),
+                        ((TokenTree::Token(_, token::Token::Comma), NonJoint), _) |
+                        (_, (TokenTree::Token(_, token::Token::Comma), NonJoint)) => continue,
+                        ((TokenTree::Token(sp, _), NonJoint), _) => *sp,
+                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = TokenStream::Tree(TokenTree::Token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -200,8 +200,14 @@ impl TokenStream {
 }
 
 impl From<TokenTree> for TokenStream {
-    fn from(tt: TokenTree) -> TokenStream {
-        TokenStream::Tree(tt, NonJoint)
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream::Tree(tree, NonJoint)
+    }
+}
+
+impl From<TokenTree> for TreeAndJoint {
+    fn from(tree: TokenTree) -> TreeAndJoint {
+        (tree, NonJoint)
     }
 }
 
@@ -213,56 +219,7 @@ impl From<Token> for TokenStream {
 
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TokenStream>>())
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
-        let iter = iter.into_iter();
-        let this = mem::replace(self, TokenStream::Empty);
-
-        // Vector of token streams originally in self.
-        let tts: Vec<TokenStream> = match this {
-            TokenStream::Empty => {
-                let mut vec = Vec::new();
-                vec.reserve(iter.size_hint().0);
-                vec
-            }
-            TokenStream::Tree(..) => {
-                let mut vec = Vec::new();
-                vec.reserve(1 + iter.size_hint().0);
-                vec.push(this);
-                vec
-            }
-            TokenStream::Stream(rc_vec) => match Lrc::try_unwrap(rc_vec) {
-                Ok(mut vec) => {
-                    // Extend in place using the existing capacity if possible.
-                    // This is the fast path for libraries like `quote` that
-                    // build a token stream.
-                    vec.reserve(iter.size_hint().0);
-                    vec
-                }
-                Err(rc_vec) => {
-                    // Self is shared so we need to copy and extend that.
-                    let mut vec = Vec::new();
-                    vec.reserve(rc_vec.len() + iter.size_hint().0);
-                    vec.extend_from_slice(&rc_vec);
-                    vec
-                }
-            }
-        };
-
-        // Perform the extend, joining tokens as needed along the way.
-        let mut builder = TokenStreamBuilder(tts);
-        for stream in iter {
-            builder.push(stream);
-        }
-
-        // Build the resulting token stream. If it contains more than one token,
-        // preserve capacity in the vector in anticipation of the caller
-        // performing additional calls to extend.
-        *self = TokenStream::new(builder.0);
+        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<Vec<TokenStream>>())
     }
 }
 
@@ -294,14 +251,43 @@ impl TokenStream {
         }
     }
 
-    pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
+    fn from_streams(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
+            _ => {
+                let mut vec = vec![];
+                for stream in streams {
+                    match stream {
+                        TokenStream::Empty => {},
+                        TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
+                        TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()),
+                    }
+                }
+                TokenStream::new(vec)
+            }
+        }
+    }
+
+    pub fn new(mut streams: Vec<TreeAndJoint>) -> TokenStream {
+        match streams.len() {
+            0 => TokenStream::empty(),
+            1 => {
+                let (tree, is_joint) = streams.pop().unwrap();
+                TokenStream::Tree(tree, is_joint)
+            }
             _ => TokenStream::Stream(Lrc::new(streams)),
         }
     }
 
+    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
+        match self {
+            TokenStream::Empty => {}
+            TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
+            TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()),
+        }
+    }
+
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }
@@ -362,54 +348,58 @@ impl TokenStream {
         t1.next().is_none() && t2.next().is_none()
     }
 
-    /// Precondition: `self` consists of a single token tree.
-    /// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`.
-    pub fn as_tree(self) -> (TokenTree, bool /* joint? */) {
-        match self {
-            TokenStream::Tree(tree, is_joint) => (tree, is_joint == Joint),
-            _ => unreachable!(),
-        }
-    }
-
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        let mut trees = self.into_trees();
-        let mut result = Vec::new();
-        let mut i = 0;
-        while let Some(stream) = trees.next_as_stream() {
-            result.push(match stream {
-                TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(i, tree), is_joint),
-                _ => unreachable!()
-            });
-            i += 1;
+        match self {
+            TokenStream::Empty => TokenStream::Empty,
+            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(0, tree), is_joint),
+            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+                stream
+                    .iter()
+                    .enumerate()
+                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                    .collect()
+            )),
         }
-        TokenStream::new(result)
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        let mut trees = self.into_trees();
-        let mut result = Vec::new();
-        while let Some(stream) = trees.next_as_stream() {
-            result.push(match stream {
-                TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint),
-                _ => unreachable!()
-            });
+        match self {
+            TokenStream::Empty => TokenStream::Empty,
+            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint),
+            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+                stream
+                    .iter()
+                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                    .collect()
+            )),
         }
-        TokenStream::new(result)
     }
 
     fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> {
         match self {
             TokenStream::Empty => None,
             TokenStream::Tree(ref tree, is_joint) => Some((tree.clone(), *is_joint)),
-            TokenStream::Stream(ref stream) => stream.first().unwrap().first_tree_and_joint(),
+            TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone())
         }
     }
 
     fn last_tree_if_joint(&self) -> Option<TokenTree> {
         match self {
-            TokenStream::Empty | TokenStream::Tree(_, NonJoint) => None,
-            TokenStream::Tree(ref tree, Joint) => Some(tree.clone()),
-            TokenStream::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, is_joint) => {
+                if *is_joint == Joint {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
+            }
+            TokenStream::Stream(ref stream) => {
+                if let (tree, Joint) = stream.last().unwrap() {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
+            }
         }
     }
 }
@@ -442,13 +432,8 @@ impl TokenStreamBuilder {
         self.0.push(stream);
     }
 
-    pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self {
-        self.push(stream);
-        self
-    }
-
     pub fn build(self) -> TokenStream {
-        TokenStream::new(self.0)
+        TokenStream::from_streams(self.0)
    }
 
     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
@@ -456,10 +441,9 @@ impl TokenStreamBuilder {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::new(streams[0 .. len - 1].to_vec())),
+                2 => self.0.push(TokenStream::Tree(streams[0].0.clone(), streams[0].1)),
+                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))),
             }
-            self.push_all_but_last_tree(&streams[len - 1])
         }
     }
 
@@ -468,154 +452,77 @@ impl TokenStreamBuilder {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::new(streams[1 .. len].to_vec())),
+                2 => self.0.push(TokenStream::Tree(streams[1].0.clone(), streams[1].1)),
+                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))),
             }
-            self.push_all_but_first_tree(&streams[0])
         }
     }
 }
 
 #[derive(Clone)]
-pub struct Cursor(CursorKind);
-
-#[derive(Clone)]
-enum CursorKind {
-    Empty,
-    Tree(TokenTree, IsJoint, bool /* consumed? */),
-    Stream(StreamCursor),
-}
-
-#[derive(Clone)]
-struct StreamCursor {
-    stream: Lrc<Vec<TokenStream>>,
+pub struct Cursor {
+    pub stream: TokenStream,
     index: usize,
-    stack: Vec<(Lrc<Vec<TokenStream>>, usize)>,
-}
-
-impl StreamCursor {
-    fn new(stream: Lrc<Vec<TokenStream>>) -> Self {
-        StreamCursor { stream: stream, index: 0, stack: Vec::new() }
-    }
-
-    fn next_as_stream(&mut self) -> Option<TokenStream> {
-        loop {
-            if self.index < self.stream.len() {
-                self.index += 1;
-                let next = self.stream[self.index - 1].clone();
-                match next {
-                    TokenStream::Empty => {}
-                    TokenStream::Tree(..) => return Some(next),
-                    TokenStream::Stream(stream) => self.insert(stream),
-                }
-            } else if let Some((stream, index)) = self.stack.pop() {
-                self.stream = stream;
-                self.index = index;
-            } else {
-                return None;
-            }
-        }
-    }
-
-    fn insert(&mut self, stream: Lrc<Vec<TokenStream>>) {
-        self.stack.push((mem::replace(&mut self.stream, stream),
-                         mem::replace(&mut self.index, 0)));
-    }
 }
 
 impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        self.next_as_stream().map(|stream| match stream {
-            TokenStream::Tree(tree, _) => tree,
-            _ => unreachable!()
-        })
+        self.next_with_joint().map(|(tree, _)| tree)
     }
 }
 
 impl Cursor {
     fn new(stream: TokenStream) -> Self {
-        Cursor(match stream {
-            TokenStream::Empty => CursorKind::Empty,
-            TokenStream::Tree(tree, is_joint) => CursorKind::Tree(tree, is_joint, false),
-            TokenStream::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)),
-        })
-    }
-
-    pub fn next_as_stream(&mut self) -> Option<TokenStream> {
-        let (stream, consumed) = match self.0 {
-            CursorKind::Tree(ref tree, ref is_joint, ref mut consumed @ false) =>
-                (TokenStream::Tree(tree.clone(), *is_joint), consumed),
-            CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(),
-            _ => return None,
-        };
-
-        *consumed = true;
-        Some(stream)
+        Cursor { stream, index: 0 }
     }
 
-    pub fn insert(&mut self, stream: TokenStream) {
-        match self.0 {
-            _ if stream.is_empty() => return,
-            CursorKind::Empty => *self = stream.trees(),
-            CursorKind::Tree(_, _, consumed) => {
-                *self = TokenStream::new(vec![self.original_stream(), stream]).trees();
-                if consumed {
-                    self.next();
+    pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
+        match self.stream {
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, ref is_joint) => {
+                if self.index == 0 {
+                    self.index = 1;
+                    Some((tree.clone(), *is_joint))
+                } else {
+                    None
                 }
             }
-            CursorKind::Stream(ref mut cursor) => {
-                cursor.insert(ThinTokenStream::from(stream).0.unwrap());
+            TokenStream::Stream(ref stream) => {
+                if self.index < stream.len() {
+                    self.index += 1;
+                    Some(stream[self.index - 1].clone())
+                } else {
+                    None
+                }
             }
         }
     }
 
-    pub fn original_stream(&self) -> TokenStream {
-        match self.0 {
-            CursorKind::Empty => TokenStream::empty(),
-            CursorKind::Tree(ref tree, ref is_joint, _) =>
-                TokenStream::Tree(tree.clone(), *is_joint),
-            CursorKind::Stream(ref cursor) => TokenStream::Stream(
-                cursor.stack.get(0).cloned().map(|(stream, _)| stream)
-                    .unwrap_or_else(|| cursor.stream.clone())
-            ),
+    pub fn append(&mut self, new_stream: TokenStream) {
+        if new_stream.is_empty() {
+            return;
         }
+        let index = self.index;
+        let stream = mem::replace(&mut self.stream, TokenStream::Empty);
+        *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees();
+        self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
-            for stream in streams {
-                n = match stream {
-                    TokenStream::Tree(ref tree, _) if n == 0 => return Ok(tree.clone()),
-                    TokenStream::Tree(..) => n - 1,
-                    TokenStream::Stream(ref stream) => match look_ahead(stream, n) {
-                        Ok(tree) => return Ok(tree),
-                        Err(n) => n,
-                    },
-                    _ => n,
-                };
+        match self.stream {
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, _) => {
+                if n == 0 && self.index == 0 {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
             }
-            Err(n)
+            TokenStream::Stream(ref stream) =>
+                stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
         }
-
-        match self.0 {
-            CursorKind::Empty |
-            CursorKind::Tree(_, _, true) => Err(n),
-            CursorKind::Tree(ref tree, _, false) => look_ahead(&[tree.clone().into()], n),
-            CursorKind::Stream(ref cursor) => {
-                look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
-                    for &(ref stream, index) in cursor.stack.iter().rev() {
-                        n = match look_ahead(&stream[index..], n) {
-                            Ok(tree) => return Ok(tree),
-                            Err(n) => n,
-                        }
-                    }
-
-                    Err(n)
-                })
-            }
-        }.ok()
     }
 }
 
@@ -623,7 +530,7 @@ impl Cursor {
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TokenStream>>>);
+pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
 
 impl ThinTokenStream {
     pub fn stream(&self) -> TokenStream {
@@ -635,7 +542,7 @@ impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream {
             TokenStream::Empty => None,
-            TokenStream::Tree(..) => Some(Lrc::new(vec![stream])),
+            TokenStream::Tree(tree, is_joint) => Some(Lrc::new(vec![(tree, is_joint)])),
             TokenStream::Stream(stream) => Some(stream),
         })
     }
@@ -742,7 +649,7 @@ mod tests {
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::new(vec![test_fst, test_snd]);
+        let eq_res = TokenStream::from_streams(vec![test_fst, test_snd]);
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
@@ -827,107 +734,4 @@ mod tests {
         assert!(stream.eq_unspanned(&string_to_ts("...")));
         assert_eq!(stream.trees().count(), 1);
     }
-
-    #[test]
-    fn test_extend_empty() {
-        with_globals(|| {
-            // Append a token onto an empty token stream.
-            let mut stream = TokenStream::empty();
-            stream.extend(vec![string_to_ts("t")]);
-
-            let expected = string_to_ts("t");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_nothing() {
-        with_globals(|| {
-            // Append nothing onto a token stream containing one token.
-            let mut stream = string_to_ts("t");
-            stream.extend(vec![]);
-
-            let expected = string_to_ts("t");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_single() {
-        with_globals(|| {
-            // Append a token onto token stream containing a single token.
-            let mut stream = string_to_ts("t1");
-            stream.extend(vec![string_to_ts("t2")]);
-
-            let expected = string_to_ts("t1 t2");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_in_place() {
-        with_globals(|| {
-            // Append a token onto token stream containing a reference counted
-            // vec of tokens. The token stream has a reference count of 1 so
-            // this can happen in place.
-            let mut stream = string_to_ts("t1 t2");
-            stream.extend(vec![string_to_ts("t3")]);
-
-            let expected = string_to_ts("t1 t2 t3");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_copy() {
-        with_globals(|| {
-            // Append a token onto token stream containing a reference counted
-            // vec of tokens. The token stream is shared so the extend takes
-            // place on a copy.
-            let mut stream = string_to_ts("t1 t2");
-            let _incref = stream.clone();
-            stream.extend(vec![string_to_ts("t3")]);
-
-            let expected = string_to_ts("t1 t2 t3");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_no_join() {
-        with_globals(|| {
-            let first = TokenTree::Token(DUMMY_SP, Token::Dot);
-            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
-
-            // Append a dot onto a token stream containing a dot, but do not
-            // join them.
-            let mut stream = TokenStream::from(first);
-            stream.extend(vec![TokenStream::from(second)]);
-
-            let expected = string_to_ts(". .");
-            assert!(stream.eq_unspanned(&expected));
-
-            let unexpected = string_to_ts("..");
-            assert!(!stream.eq_unspanned(&unexpected));
-        });
-    }
-
-    #[test]
-    fn test_extend_join() {
-        with_globals(|| {
-            let first = TokenTree::Token(DUMMY_SP, Token::Dot).joint();
-            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
-
-            // Append a dot onto a token stream containing a dot, forming a
-            // dotdot.
-            let mut stream = first;
-            stream.extend(vec![TokenStream::from(second)]);
-
-            let expected = string_to_ts("..");
-            assert!(stream.eq_unspanned(&expected));
-
-            let unexpected = string_to_ts(". .");
-            assert!(!stream.eq_unspanned(&unexpected));
-        });
-    }
 }
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index afd86a4f7465c..158cbc791ef50 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -11,7 +11,7 @@ use syntax::ast;
 use syntax::ext::base::ExtCtxt;
 use syntax::parse::lexer::comments;
 use syntax::parse::{self, token, ParseSess};
-use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream};
+use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
 use syntax_pos::hygiene::{SyntaxContext, Transparency};
 use syntax_pos::symbol::{keywords, Symbol};
 use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
@@ -46,13 +46,14 @@ impl ToInternal<token::DelimToken> for Delimiter {
     }
 }
 
-impl FromInternal<(TokenStream, &'_ ParseSess, &'_ mut Vec<Self>)>
+impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
     for TokenTree<Group, Punct, Ident, Literal>
 {
-    fn from_internal((stream, sess, stack): (TokenStream, &ParseSess, &mut Vec<Self>)) -> Self {
+    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
+        -> Self {
         use syntax::parse::token::*;
 
-        let (tree, joint) = stream.as_tree();
+        let joint = is_joint == Joint;
         let (span, token) = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
@@ -450,7 +451,7 @@ impl server::TokenStreamIter for Rustc<'_> {
     ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
         loop {
             let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_as_stream()?;
+                let next = iter.cursor.next_with_joint()?;
                 Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
             })?;
             // HACK: The condition "dummy span + group with empty delimiter" represents an AST
@@ -461,7 +462,7 @@ impl server::TokenStreamIter for Rustc<'_> {
             // and not doing the roundtrip through AST.
             if let TokenTree::Group(ref group) = tree {
                 if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
-                    iter.cursor.insert(group.stream.clone());
+                    iter.cursor.append(group.stream.clone());
                     continue;
                 }
             }
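
As a reading aid, not part of the patch: the sketch below mirrors the shape of the change with simplified stand-in types (a one-character token instead of rustc's spanned `Token`, and `Rc` in place of `Lrc`). It shows how the new `TreeAndJoint` alias, the flattened `Stream` variant, and the `new` / `from_streams` / `append_to_tree_and_joint_vec` trio fit together; it is a standalone model, not the compiler's actual code.

```rust
// Illustrative sketch only. Names mirror the patch; the token and pointer
// types are simplified stand-ins so the flattening logic can be read alone.

use std::rc::Rc; // stand-in for rustc's `Lrc`

#[derive(Clone, Debug)]
enum TokenTree {
    Token(char), // stand-in for a real spanned token
}

#[derive(Clone, Copy, Debug)]
enum IsJoint {
    Joint,
    NonJoint,
}

// The alias the patch introduces: a tree plus its jointness.
type TreeAndJoint = (TokenTree, IsJoint);

#[derive(Clone, Debug)]
enum TokenStream {
    Empty,
    Tree(TokenTree, IsJoint),
    // After the patch, `Stream` holds a flat vector of (tree, joint) pairs
    // instead of a vector of nested `TokenStream`s.
    Stream(Rc<Vec<TreeAndJoint>>),
}

impl TokenStream {
    fn empty() -> TokenStream {
        TokenStream::Empty
    }

    // Build a stream from an already-flat list of pairs.
    fn new(mut streams: Vec<TreeAndJoint>) -> TokenStream {
        match streams.len() {
            0 => TokenStream::empty(),
            1 => {
                let (tree, is_joint) = streams.pop().unwrap();
                TokenStream::Tree(tree, is_joint)
            }
            _ => TokenStream::Stream(Rc::new(streams)),
        }
    }

    // Concatenate whole streams by flattening them into one vector of pairs.
    fn from_streams(mut streams: Vec<TokenStream>) -> TokenStream {
        match streams.len() {
            0 => TokenStream::empty(),
            1 => streams.pop().unwrap(),
            _ => {
                let mut vec = vec![];
                for stream in streams {
                    stream.append_to_tree_and_joint_vec(&mut vec);
                }
                TokenStream::new(vec)
            }
        }
    }

    // The helper the call sites above switch to: push this stream's pairs
    // onto an existing flat vector.
    fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
        match self {
            TokenStream::Empty => {}
            TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
            TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()),
        }
    }
}

fn main() {
    let a = TokenStream::new(vec![(TokenTree::Token('.'), IsJoint::Joint)]);
    let b = TokenStream::new(vec![(TokenTree::Token('.'), IsJoint::NonJoint)]);
    // Two single-tree streams flatten into one two-element `Stream`.
    let joined = TokenStream::from_streams(vec![a, b]);
    println!("{:?}", joined);
}
```

The point of the flattening is that a `Stream` now carries `(TokenTree, IsJoint)` pairs directly, so concatenation, iteration, and lookahead become simple vector operations and no longer have to walk nested streams the way the removed `StreamCursor` machinery did.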