Auto merge of #60763 - matklad:tt-parser, r=petrochenkov
Move token tree related lexer state to a separate struct

Just a types-based refactoring.

A bunch of fields were only used when tokenizing into a token tree, so let's move them out of the base lexer.
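
As a rough sketch of the pattern (simplified, hypothetical types, not the actual rustc ones): the token-tree bookkeeping moves onto a wrapper struct that owns the base lexer for the duration of token-tree parsing.

```rust
// Standalone sketch of the refactoring pattern; the types and fields here are
// simplified stand-ins, not the real rustc definitions.

/// The base lexer keeps only the state needed for plain tokenization.
struct StringReader {
    src: String,
    pos: usize,
}

/// Token-tree-only bookkeeping (delimiter stack, error-recovery spans, ...)
/// lives on a wrapper that owns the lexer.
struct TokenTreesReader {
    string_reader: StringReader,
    open_braces: Vec<char>,
}

impl StringReader {
    /// Mirrors `into_token_trees`: the wrapper takes ownership of the lexer,
    /// so the extra state only exists while token trees are being built.
    fn into_token_trees(self) -> TokenTreesReader {
        TokenTreesReader {
            string_reader: self,
            open_braces: Vec::new(),
        }
    }
}

fn main() {
    let reader = StringReader { src: "fn main() {}".to_string(), pos: 0 };
    let tt_reader = reader.into_token_trees();
    println!(
        "lexer at byte {} of {}, {} open delimiters",
        tt_reader.string_reader.pos,
        tt_reader.string_reader.src.len(),
        tt_reader.open_braces.len()
    );
}
```

The actual diff below does the same with `token`, `span`, `open_braces`, `unmatched_braces`, `matching_delim_spans`, and `last_unclosed_found_span`.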
bors committed May 16, 2019
2 parents 49d139c + e249f2e commit 024c25d
Showing 3 changed files with 62 additions and 43 deletions.
26 changes: 1 addition & 25 deletions src/libsyntax/parse/lexer/mod.rs
@@ -62,19 +62,7 @@ pub struct StringReader<'a> {
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
src: Lrc<String>,
-token: token::Token,
-span: Span,
-/// The raw source span which *does not* take `override_span` into account
-span_src_raw: Span,
-/// Stack of open delimiters and their spans. Used for error message.
-open_braces: Vec<(token::DelimToken, Span)>,
-crate unmatched_braces: Vec<UnmatchedBrace>,
-/// The type and spans for all braces
-///
-/// Used only for error recovery when arriving to EOF with mismatched braces.
-matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
-crate override_span: Option<Span>,
-last_unclosed_found_span: Option<Span>,
+override_span: Option<Span>,
}

impl<'a> StringReader<'a> {
@@ -121,8 +109,6 @@ impl<'a> StringReader<'a> {
sp: self.peek_span,
};
self.advance_token()?;
-self.span_src_raw = self.peek_span_src_raw;
-
Ok(ret_val)
}

@@ -159,9 +145,6 @@ impl<'a> StringReader<'a> {
}
}

-self.token = t.tok.clone();
-self.span = t.sp;
-
Ok(t)
}

@@ -251,14 +234,7 @@ impl<'a> StringReader<'a> {
peek_span_src_raw: syntax_pos::DUMMY_SP,
src,
fatal_errs: Vec::new(),
-token: token::Eof,
-span: syntax_pos::DUMMY_SP,
-span_src_raw: syntax_pos::DUMMY_SP,
-open_braces: Vec::new(),
-unmatched_braces: Vec::new(),
-matching_delim_spans: Vec::new(),
override_span,
-last_unclosed_found_span: None,
}
}

67 changes: 55 additions & 12 deletions src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,13 +1,46 @@
use syntax_pos::Span;

use crate::print::pprust::token_to_string;
use crate::parse::lexer::{StringReader, UnmatchedBrace};
use crate::parse::{token, PResult};
use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};

impl<'a> StringReader<'a> {
+crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+let mut tt_reader = TokenTreesReader {
+string_reader: self,
+token: token::Eof,
+span: syntax_pos::DUMMY_SP,
+open_braces: Vec::new(),
+unmatched_braces: Vec::new(),
+matching_delim_spans: Vec::new(),
+last_unclosed_found_span: None,
+};
+let res = tt_reader.parse_all_token_trees();
+(res, tt_reader.unmatched_braces)
+}
+}
+
+struct TokenTreesReader<'a> {
+string_reader: StringReader<'a>,
+token: token::Token,
+span: Span,
+/// Stack of open delimiters and their spans. Used for error message.
+open_braces: Vec<(token::DelimToken, Span)>,
+unmatched_braces: Vec<UnmatchedBrace>,
+/// The type and spans for all braces
+///
+/// Used only for error recovery when arriving to EOF with mismatched braces.
+matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+last_unclosed_found_span: Option<Span>,
+}
+
+impl<'a> TokenTreesReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
+fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
let mut tts = Vec::new();

+self.real_token();
while self.token != token::Eof {
tts.push(self.parse_token_tree()?);
}
@@ -34,25 +67,25 @@ impl<'a> StringReader<'a> {
}

fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
-let sm = self.sess.source_map();
+let sm = self.string_reader.sess.source_map();
match self.token {
token::Eof => {
let msg = "this file contains an un-closed delimiter";
-let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
+let mut err = self.string_reader.sess.span_diagnostic
+.struct_span_err(self.span, msg);
for &(_, sp) in &self.open_braces {
err.span_label(sp, "un-closed delimiter");
}

if let Some((delim, _)) = self.open_braces.last() {
if let Some((_, open_sp, close_sp)) = self.matching_delim_spans.iter()
.filter(|(d, open_sp, close_sp)| {

-if let Some(close_padding) = sm.span_to_margin(*close_sp) {
-if let Some(open_padding) = sm.span_to_margin(*open_sp) {
-return delim == d && close_padding != open_padding;
+if let Some(close_padding) = sm.span_to_margin(*close_sp) {
+if let Some(open_padding) = sm.span_to_margin(*open_sp) {
+return delim == d && close_padding != open_padding;
}
}
}
-false
+false
}).next() // these are in reverse order as they get inserted on close, but
{ // we want the last open/first close
err.span_label(
@@ -164,7 +197,8 @@ impl<'a> StringReader<'a> {
// matching opening delimiter).
let token_str = token_to_string(&self.token);
let msg = format!("unexpected close delimiter: `{}`", token_str);
-let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
+let mut err = self.string_reader.sess.span_diagnostic
+.struct_span_err(self.span, &msg);
err.span_label(self.span, "unexpected close delimiter");
Err(err)
},
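
The EOF branch above recovers from an unclosed delimiter by scanning `matching_delim_spans` for an already-matched block of the same kind whose opening and closing delimiters sit at different left margins (via `span_to_margin`), on the theory that such a block likely swallowed the missing close delimiter. A standalone sketch of that heuristic, with hypothetical types standing in for the real spans:

```rust
// Standalone sketch, not rustc code: spans are reduced to the column (margin)
// of the delimiter, which is all the heuristic looks at.
#[derive(PartialEq)]
enum Delim {
    Brace,
    Paren,
}

struct MatchedPair {
    delim: Delim,
    open_col: usize,  // left margin of the opening delimiter
    close_col: usize, // left margin of the closing delimiter
}

/// Given the kind of delimiter still open at EOF, pick a recorded pair whose
/// open and close margins disagree -- a likely culprit for the missing close.
fn find_suspect(pairs: &[MatchedPair], unclosed: Delim) -> Option<&MatchedPair> {
    // Like the original, this uses `.filter(..).next()` because the pairs are
    // stored in reverse order of closing and we want the first hit.
    pairs
        .iter()
        .filter(|p| p.delim == unclosed && p.open_col != p.close_col)
        .next()
}

fn main() {
    let pairs = vec![
        MatchedPair { delim: Delim::Brace, open_col: 4, close_col: 4 },
        MatchedPair { delim: Delim::Brace, open_col: 0, close_col: 4 }, // misaligned
    ];
    assert!(find_suspect(&pairs, Delim::Brace).is_some());
    assert!(find_suspect(&pairs, Delim::Paren).is_none());
}
```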
@@ -173,11 +207,20 @@ impl<'a> StringReader<'a> {
// Note that testing for joint-ness here is done via the raw
// source span as the joint-ness is a property of the raw source
// rather than wanting to take `override_span` into account.
-let raw = self.span_src_raw;
+// Additionally, we actually check if the *next* pair of tokens
+// is joint, but this is equivalent to checking the current pair.
+let raw = self.string_reader.peek_span_src_raw;
self.real_token();
-let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
+let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
+&& token::is_op(&self.token);
Ok((tt, if is_joint { Joint } else { NonJoint }))
}
}
}

+fn real_token(&mut self) {
+let t = self.string_reader.real_token();
+self.token = t.tok;
+self.span = t.sp;
+}
}
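
The joint-ness comment in the hunk above is the subtlest part of the move: whether two neighbouring tokens are joint is decided from the raw source spans (ignoring `override_span`). A standalone sketch of that adjacency check, with a hypothetical `RawSpan` in place of the real `Span` and a plain boolean in place of `token::is_op`:

```rust
// Standalone sketch, not rustc code: `RawSpan` stands in for the raw
// (pre-`override_span`) span that the check above compares.
struct RawSpan {
    lo: u32,
    hi: u32,
}

/// Two tokens are "joint" when there is no gap between their raw spans and the
/// second one is an operator, e.g. the two `>`s in `>>`.
fn is_joint(current: RawSpan, next: RawSpan, next_is_op: bool) -> bool {
    current.hi == next.lo && next_is_op
}

fn main() {
    // `>>`: the second `>` starts exactly where the first one ends.
    assert!(is_joint(RawSpan { lo: 10, hi: 11 }, RawSpan { lo: 11, hi: 12 }, true));
    // `> >`: whitespace separates the two tokens, so they are not joint.
    assert!(!is_joint(RawSpan { lo: 10, hi: 11 }, RawSpan { lo: 12, hi: 13 }, true));
}
```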
12 changes: 6 additions & 6 deletions src/libsyntax/parse/mod.rs
@@ -290,22 +290,22 @@ pub fn source_file_to_stream(
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
-/// parsing the token tream.
+/// parsing the token stream.
pub fn maybe_file_to_stream(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
-srdr.real_token();
+let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+let (token_trees, unmatched_braces) = srdr.into_token_trees();

-match srdr.parse_all_token_trees() {
-Ok(stream) => Ok((stream, srdr.unmatched_braces)),
+match token_trees {
+Ok(stream) => Ok((stream, unmatched_braces)),
Err(err) => {
let mut buffer = Vec::with_capacity(1);
err.buffer(&mut buffer);
// Not using `emit_unclosed_delims` to use `db.buffer`
-for unmatched in srdr.unmatched_braces {
+for unmatched in unmatched_braces {
let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
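
The `maybe_file_to_stream` hunk above also simplifies the calling convention: callers no longer have to remember to prime the lexer with `real_token()` before parsing, because the consuming `into_token_trees()` call (via `parse_all_token_trees`) now does that itself. A toy model of that design choice, with made-up types rather than rustc's API:

```rust
// Toy model of the call-protocol change: the priming step that callers
// previously performed by hand now happens inside the consuming call.
struct Lexer {
    tokens: Vec<char>,
    current: Option<char>,
}

impl Lexer {
    /// Advance to the next "token" (just a char in this toy).
    fn real_token(&mut self) {
        self.current = if self.tokens.is_empty() {
            None
        } else {
            Some(self.tokens.remove(0))
        };
    }

    /// New-style entry point: like `into_token_trees`, it consumes the lexer
    /// and performs the initial `real_token()` itself, so the invariant cannot
    /// be forgotten at call sites.
    fn into_trees(mut self) -> Vec<char> {
        self.real_token();
        let mut trees = Vec::new();
        while let Some(c) = self.current {
            trees.push(c);
            self.real_token();
        }
        trees
    }
}

fn main() {
    let lexer = Lexer { tokens: vec!['a', 'b', 'c'], current: None };
    // No separate priming call is needed before parsing.
    assert_eq!(lexer.into_trees(), vec!['a', 'b', 'c']);
}
```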
