Commit

cleanup lexer constructors
matklad committed Jul 4, 2019
1 parent 256df83 commit 601bad8
Showing 5 changed files with 21 additions and 28 deletions.
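
The whole commit is one refactoring: StringReader::new_or_buffered_errs returned Result&lt;Self, Vec&lt;Diagnostic&gt;&gt; but, as its removed body shows, could never return Err, so it is deleted; the private new_raw is renamed to a public new, and new_raw_internal becomes new_internal. A minimal self-contained sketch of that shape (the types below are stand-ins, not the real libsyntax API):

    // Stand-in types; not the real libsyntax definitions.
    #[derive(Debug)]
    struct Diagnostic;

    struct StringReader {
        src: String,
    }

    impl StringReader {
        // After the commit: a single public, infallible constructor
        // (the former `new_raw`, renamed and made `pub`).
        pub fn new(src: String) -> Self {
            StringReader { src }
        }

        // Before the commit, this wrapper existed alongside it. It could
        // never fail, so the Result only forced `?`/`and_then` onto callers:
        pub fn new_or_buffered_errs(src: String) -> Result<Self, Vec<Diagnostic>> {
            Ok(StringReader::new(src))
        }
    }

    fn main() {
        // Caller before: threading a Result that was always Ok.
        let old = StringReader::new_or_buffered_errs("fn main() {}".into()).unwrap();
        // Caller after: a plain constructor call.
        let new = StringReader::new("fn main() {}".into());
        assert_eq!(old.src, new.src);
    }
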
22 changes: 11 additions & 11 deletions src/librustdoc/html/highlight.rs
@@ -38,17 +38,17 @@ pub fn render_with_highlighting(
         FileName::Custom(String::from("rustdoc-highlighting")),
         src.to_owned(),
     );
-    let highlight_result =
-        lexer::StringReader::new_or_buffered_errs(&sess, fm, None).and_then(|lexer| {
-            let mut classifier = Classifier::new(lexer, sess.source_map());
-
-            let mut highlighted_source = vec![];
-            if classifier.write_source(&mut highlighted_source).is_err() {
-                Err(classifier.lexer.buffer_fatal_errors())
-            } else {
-                Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
-            }
-        });
+    let highlight_result = {
+        let lexer = lexer::StringReader::new(&sess, fm, None);
+        let mut classifier = Classifier::new(lexer, sess.source_map());
+
+        let mut highlighted_source = vec![];
+        if classifier.write_source(&mut highlighted_source).is_err() {
+            Err(classifier.lexer.buffer_fatal_errors())
+        } else {
+            Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
+        }
+    };
 
     match highlight_result {
         Ok(highlighted_source) => {
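
On the caller side (this file and check_code_block_syntax.rs below), the rewrite is the same mechanical step each time: an and_then closure over the Result-returning constructor becomes a plain block expression. Reduced to a runnable stand-in sketch (none of these names are the real rustdoc code):

    fn lex_all(src: &str) -> Result<usize, Vec<String>> {
        // Before: Lexer::new_or_buffered_errs(src).and_then(|lexer| { ... })
        // After: construct directly and keep the old closure body as a block.
        let token_count = {
            let lexer = src.split_whitespace(); // stand-in for the real lexer
            lexer.count()
        };
        if token_count == 0 {
            Err(vec!["empty input".to_string()]) // stand-in for buffered fatal errors
        } else {
            Ok(token_count)
        }
    }

    fn main() {
        assert_eq!(lex_all("fn main ( ) { }"), Ok(6));
        assert!(lex_all("").is_err());
    }
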
5 changes: 3 additions & 2 deletions src/librustdoc/passes/check_code_block_syntax.rs
@@ -32,7 +32,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             dox[code_block.code].to_owned(),
         );
 
-        let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
+        let errors = {
+            let mut lexer = Lexer::new(&sess, source_file, None);
             while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
                 if kind == token::Eof {
                     break;
@@ -46,7 +47,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             } else {
                 Ok(())
             }
-        });
+        };
 
         if let Err(errors) = errors {
             let mut diag = if let Some(sp) =
2 changes: 1 addition & 1 deletion src/libsyntax/parse/lexer/comments.rs
@@ -346,7 +346,7 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) ->
     srdr.read_to_string(&mut src).unwrap();
     let cm = SourceMap::new(sess.source_map().path_mapping().clone());
     let source_file = cm.new_source_file(path, src);
-    let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
+    let mut rdr = lexer::StringReader::new(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
     let mut code_to_the_left = false; // Only code
18 changes: 5 additions & 13 deletions src/libsyntax/parse/lexer/mod.rs
@@ -149,16 +149,15 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    /// For comments.rs, which hackily pokes into next_pos and ch
-    fn new_raw(sess: &'a ParseSess,
+    pub fn new(sess: &'a ParseSess,
                source_file: Lrc<syntax_pos::SourceFile>,
                override_span: Option<Span>) -> Self {
-        let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
+        let mut sr = StringReader::new_internal(sess, source_file, override_span);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
+    fn new_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
                         override_span: Option<Span>) -> Self
     {
         if source_file.src.is_none() {
@@ -181,13 +180,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new_or_buffered_errs(sess: &'a ParseSess,
-                                source_file: Lrc<syntax_pos::SourceFile>,
-                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let sr = StringReader::new_raw(sess, source_file, override_span);
-        Ok(sr)
-    }
-
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
@@ -197,7 +189,7 @@ impl<'a> StringReader<'a> {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
+        let mut sr = StringReader::new_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
@@ -1428,7 +1420,7 @@ mod tests {
                  teststr: String)
                  -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new_raw(sess, sf, None)
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
2 changes: 1 addition & 1 deletion src/libsyntax/parse/mod.rs
@@ -305,7 +305,7 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    let srdr = lexer::StringReader::new(sess, source_file, override_span);
     let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
     match token_trees {
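
Here maybe_file_to_stream keeps its fallible signature, since building the token trees can still fail; only the `?` after the constructor goes away. A stand-in sketch of that shape (hypothetical names, not the real parse code):

    fn maybe_tokens(src: &str) -> Result<Vec<String>, Vec<String>> {
        // Before: let srdr = StringReader::new_or_buffered_errs(...)?;
        // After: construction is infallible, so no `?` here.
        let srdr: Vec<&str> = src.split_whitespace().collect();

        // Downstream work can still fail, so the Result return type stays.
        if srdr.iter().any(|tok| *tok == "{") && !srdr.iter().any(|tok| *tok == "}") {
            return Err(vec!["unmatched brace".to_string()]);
        }
        Ok(srdr.into_iter().map(String::from).collect())
    }

    fn main() {
        assert!(maybe_tokens("fn main ( ) { }").is_ok());
        assert!(maybe_tokens("fn main {").is_err());
    }
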
