Commit 0fe6aae: buffer errors from initial tokenization when parsing

QuietMisdreavus committed Nov 1, 2018
1 parent 8a3b5e9 commit 0fe6aae
Showing 3 changed files with 79 additions and 7 deletions.

src/librustdoc/test.rs (15 changes: 11 additions & 4 deletions)
@@ -422,13 +422,20 @@ pub fn make_test(s: &str,
 
     debug!("about to parse: \n{}", source);
 
-    // FIXME(misdreavus): this can still emit a FatalError (and thus halt rustdoc prematurely)
-    // if there is a lexing error in the first token
-    let mut parser = parse::new_parser_from_source_str(&sess, filename, source);
-
     let mut found_main = false;
     let mut found_extern_crate = cratename.is_none();
 
+    let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
+        Ok(p) => p,
+        Err(errs) => {
+            for mut err in errs {
+                err.cancel();
+            }
+
+            return (found_main, found_extern_crate);
+        }
+    };
+
     loop {
         match parser.parse_item() {
             Ok(Some(item)) => {
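
For illustration, here is a minimal, self-contained sketch of the calling pattern this hunk adopts. `FakeDiagnostic` and `FakeParser` are hypothetical stand-ins for libsyntax's `Diagnostic` and `Parser` (which are not usable outside the compiler tree); only the control flow mirrors the diff: on a lexing error, cancel the buffered diagnostics and return the defaults rather than aborting.

struct FakeDiagnostic {
    msg: String,
}

impl FakeDiagnostic {
    // Stand-in for `DiagnosticBuilder::cancel`: drop the error unreported.
    fn cancel(&mut self) {
        self.msg.clear();
    }
}

struct FakeParser {
    source: String,
}

// Stand-in for `maybe_new_parser_from_source_str`: reject the source
// instead of aborting the process when "lexing" fails.
fn maybe_new_parser(source: &str) -> Result<FakeParser, Vec<FakeDiagnostic>> {
    if let Some(bad) = source.chars().find(|c| !c.is_ascii()) {
        return Err(vec![FakeDiagnostic {
            msg: format!("unknown start of token: {:?}", bad),
        }]);
    }
    Ok(FakeParser { source: source.to_string() })
}

// Mirrors the new `make_test` logic: on a lexing error, cancel the
// buffered diagnostics and return the defaults instead of halting.
fn scan_for_main(source: &str) -> (bool, bool) {
    let found_main = false;
    let found_extern_crate = false;

    let parser = match maybe_new_parser(source) {
        Ok(p) => p,
        Err(errs) => {
            for mut err in errs {
                err.cancel();
            }
            return (found_main, found_extern_crate);
        }
    };

    (parser.source.contains("fn main"), found_extern_crate)
}

fn main() {
    assert_eq!(scan_for_main("fn main() {}").0, true);
    // A lexing error no longer aborts; we just get the defaults back.
    assert_eq!(scan_for_main("fn main¢() {}"), (false, false));
}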

src/libsyntax/parse/lexer/mod.rs (23 changes: 22 additions & 1 deletion)
@@ -11,7 +11,7 @@
 use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ impl<'a> StringReader<'a> {
         self.fatal_errs.clear();
     }
 
+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
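
The `err.buffer(&mut buffer)` call above moves a pending diagnostic into a caller-owned `Vec` instead of emitting it. For illustration, a self-contained sketch of that drain-and-collect shape, assuming a hypothetical `PendingError` in place of `DiagnosticBuilder` and plain `String`s in place of `Diagnostic`:

// Hypothetical stand-in for `DiagnosticBuilder`; `buffer` moves the
// pending error into a caller-owned Vec instead of emitting it.
struct PendingError {
    msg: String,
}

impl PendingError {
    fn buffer(self, buffer: &mut Vec<String>) {
        buffer.push(self.msg);
    }
}

struct Lexer {
    fatal_errs: Vec<PendingError>,
}

impl Lexer {
    // Mirrors `StringReader::buffer_fatal_errors`: drain the queued
    // fatal errors, transferring ownership to the caller.
    fn buffer_fatal_errors(&mut self) -> Vec<String> {
        let mut buffer = Vec::new();

        for err in self.fatal_errs.drain(..) {
            err.buffer(&mut buffer);
        }

        buffer
    }
}

fn main() {
    let mut lexer = Lexer {
        fatal_errs: vec![PendingError { msg: "unterminated string".into() }],
    };

    let buffered = lexer.buffer_fatal_errors();
    assert_eq!(buffered, ["unterminated string"]);
    assert!(lexer.fatal_errs.is_empty()); // nothing left to emit on drop
}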
@@ -251,6 +261,17 @@ impl<'a> StringReader<'a> {
         Ok(sr)
     }
 
+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<syntax_pos::SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
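
`new_or_buffered_errs` makes construction of the reader fallible: it still advances to the first token, but a lexing failure now surfaces as `Err` carrying the buffered errors rather than staying queued inside the reader. A self-contained sketch of that constructor shape, again with hypothetical stand-in types:

// Self-contained sketch of the fallible-constructor shape of
// `new_or_buffered_errs`. `Reader` and its String errors are
// hypothetical stand-ins; the real types live in libsyntax.
#[derive(Debug)]
struct Reader {
    chars: Vec<char>,
    pos: usize,
    fatal_errs: Vec<String>,
}

impl Reader {
    // Stand-in for `StringReader::new_raw`: never fails by itself.
    fn new_raw(source: &str) -> Self {
        Reader { chars: source.chars().collect(), pos: 0, fatal_errs: Vec::new() }
    }

    // Stand-in for `advance_token`: queue a fatal error on non-ASCII input.
    fn advance_token(&mut self) -> Result<(), ()> {
        match self.chars.get(self.pos) {
            Some(c) if !c.is_ascii() => {
                self.fatal_errs.push(format!("unknown start of token: {:?}", c));
                Err(())
            }
            _ => {
                self.pos += 1;
                Ok(())
            }
        }
    }

    // Mirrors `new_or_buffered_errs`: advance to the first token, and on
    // failure hand the queued errors back instead of leaving them pending.
    fn new_or_buffered_errs(source: &str) -> Result<Self, Vec<String>> {
        let mut sr = Reader::new_raw(source);
        if sr.advance_token().is_err() {
            Err(sr.fatal_errs.drain(..).collect())
        } else {
            Ok(sr)
        }
    }
}

fn main() {
    assert!(Reader::new_or_buffered_errs("fn").is_ok());
    assert_eq!(Reader::new_or_buffered_errs("¢").unwrap_err().len(), 1);
}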

src/libsyntax/parse/mod.rs (48 changes: 46 additions & 2 deletions)
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,14 +174,25 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
     let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
     parser.recurse_into_file_modules = false;
     parser
 }
 
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
+    parser.recurse_into_file_modules = false;
+    Ok(parser)
+}
+
 /// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
@@ -214,6 +225,21 @@ fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Par
     parser
 }
 
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let end_pos = source_file.end_pos;
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
+
+    if parser.token == token::Eof && parser.span.is_dummy() {
+        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
+    }
+
+    Ok(parser)
+}
+
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
@@ -248,6 +274,24 @@ pub fn source_file_to_stream(sess: &ParseSess,
     panictry!(srdr.parse_all_token_trees())
 }
 
+/// Given a source file, produce a sequence of token trees. Returns any buffered errors from
+/// parsing the token stream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    srdr.real_token();
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
+}
+
 /// Given stream and the `ParseSess`, produce a parser
 pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, true, false)
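
Taken together, the new `maybe_*` functions form a chain in which each layer forwards buffered lexing errors with `?`, so only the outermost caller decides whether to emit, cancel, or inspect them. A self-contained sketch of that layering, with hypothetical stand-ins and plain `String`s as diagnostics:

// Sketch of the propagation chain introduced in this file: each layer
// returns Result<_, Vec<String>> and forwards buffered errors with `?`.
// All names here are hypothetical stand-ins for the libsyntax versions.
type Errors = Vec<String>;

#[derive(Debug)]
struct Stream(Vec<char>);

#[derive(Debug)]
struct Parser {
    stream: Stream,
}

// Stand-in for `maybe_file_to_stream`: a lexing failure becomes Err data.
fn maybe_file_to_stream(source: &str) -> Result<Stream, Errors> {
    if let Some(bad) = source.chars().find(|c| !c.is_ascii()) {
        return Err(vec![format!("unknown start of token: {:?}", bad)]);
    }
    Ok(Stream(source.chars().collect()))
}

// Stand-in for `maybe_source_file_to_parser`: forwards errors with `?`.
fn maybe_source_file_to_parser(source: &str) -> Result<Parser, Errors> {
    let stream = maybe_file_to_stream(source)?;
    Ok(Parser { stream })
}

// Stand-in for `maybe_new_parser_from_source_str`: the public entry point.
fn maybe_new_parser_from_source_str(source: &str) -> Result<Parser, Errors> {
    maybe_source_file_to_parser(source)
}

fn main() {
    let parser = maybe_new_parser_from_source_str("fn main() {}").unwrap();
    assert!(!parser.stream.0.is_empty());

    // The lexing error comes back as data for the caller to cancel,
    // emit, or inspect; it no longer aborts the process.
    let errs = maybe_new_parser_from_source_str("let x = ¢;").unwrap_err();
    assert_eq!(errs.len(), 1);
}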
