Eliminate comments::Literal
petrochenkov committed May 11, 2019
1 parent 751ae5a commit a5b3f33
Showing 7 changed files with 61 additions and 210 deletions.
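
In short, this commit removes the pretty-printer's parallel table of source-text literals: comments::Literal and gather_comments_and_literals go away, gather_comments returns only comments, and literal text is rendered from the AST/HIR nodes instead. Below is a minimal standalone sketch of the before/after shape of the printer state, using simplified stand-in types rather than the actual compiler code:

// Minimal stand-ins for the compiler's types, for illustration only.
#[allow(dead_code)]
struct Comment { text: String, pos: usize }
#[allow(dead_code)]
struct Literal { lit: String, pos: usize }

// Before (simplified): the printer carried a peekable iterator over a
// separately gathered table of literal spellings.
#[allow(dead_code)]
struct OldState {
    comments: Option<Vec<Comment>>,
    literals: std::iter::Peekable<std::vec::IntoIter<Literal>>,
}

// After (simplified): only comments are gathered; literal text comes from
// the AST/HIR nodes at print time.
struct NewState {
    comments: Option<Vec<Comment>>,
}

fn main() {
    let comments = vec![Comment { text: "// example".to_string(), pos: 0 }];
    let state = NewState { comments: Some(comments) };
    println!("{} comment(s) gathered", state.comments.as_ref().map_or(0, |c| c.len()));
}

In the real diff this corresponds to dropping the literals field from State in src/librustc/hir/print.rs and replacing gather_comments_and_literals with gather_comments in src/libsyntax/parse/lexer/comments.rs.
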
45 changes: 7 additions & 38 deletions src/librustc/hir/print.rs
@@ -19,7 +19,6 @@ use std::ascii;
use std::borrow::Cow;
use std::cell::Cell;
use std::io::{self, Write, Read};
use std::iter::Peekable;
use std::vec;

pub enum AnnNode<'a> {
@@ -77,7 +76,6 @@ pub struct State<'a> {
pub s: pp::Printer<'a>,
cm: Option<&'a SourceMap>,
comments: Option<Vec<comments::Comment>>,
literals: Peekable<vec::IntoIter<comments::Literal>>,
cur_cmnt: usize,
boxes: Vec<pp::Breaks>,
ann: &'a (dyn PpAnn + 'a),
@@ -99,14 +97,6 @@ impl<'a> PrintState<'a> for State<'a> {
fn cur_cmnt(&mut self) -> &mut usize {
&mut self.cur_cmnt
}

fn cur_lit(&mut self) -> Option<&comments::Literal> {
self.literals.peek()
}

fn bump_lit(&mut self) -> Option<comments::Literal> {
self.literals.next()
}
}

#[allow(non_upper_case_globals)]
@@ -117,18 +107,16 @@ pub const default_columns: usize = 78;


/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments and literals to
/// copy forward.
/// it can scan the input text for comments to copy forward.
pub fn print_crate<'a>(cm: &'a SourceMap,
sess: &ParseSess,
krate: &hir::Crate,
filename: FileName,
input: &mut dyn Read,
out: Box<dyn Write + 'a>,
ann: &'a dyn PpAnn,
is_expanded: bool)
ann: &'a dyn PpAnn)
-> io::Result<()> {
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
let mut s = State::new_from_input(cm, sess, filename, input, out, ann);

// When printing the AST, we sometimes need to inject `#[no_std]` here.
// Since you can't compile the HIR, it's not necessary.
@@ -144,36 +132,21 @@ impl<'a> State<'a> {
filename: FileName,
input: &mut dyn Read,
out: Box<dyn Write + 'a>,
ann: &'a dyn PpAnn,
is_expanded: bool)
ann: &'a dyn PpAnn)
-> State<'a> {
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);

State::new(cm,
out,
ann,
Some(cmnts),
// If the code is post expansion, don't use the table of
// literals, since it doesn't correspond with the literals
// in the AST anymore.
if is_expanded {
None
} else {
Some(lits)
})
let comments = comments::gather_comments(sess, filename, input);
State::new(cm, out, ann, Some(comments))
}

pub fn new(cm: &'a SourceMap,
out: Box<dyn Write + 'a>,
ann: &'a dyn PpAnn,
comments: Option<Vec<comments::Comment>>,
literals: Option<Vec<comments::Literal>>)
comments: Option<Vec<comments::Comment>>)
-> State<'a> {
State {
s: pp::mk_printer(out, default_columns),
cm: Some(cm),
comments,
literals: literals.unwrap_or_default().into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
ann,
@@ -190,7 +163,6 @@ pub fn to_string<F>(ann: &dyn PpAnn, f: F) -> String
s: pp::mk_printer(Box::new(&mut wr), default_columns),
cm: None,
comments: None,
literals: vec![].into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
ann,
@@ -1279,9 +1251,6 @@ impl<'a> State<'a> {

fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
if let Some(ltrl) = self.next_lit(lit.span.lo()) {
return self.writer().word(ltrl.lit.clone());
}
match lit.node {
hir::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
hir::LitKind::Err(st) => {
6 changes: 2 additions & 4 deletions src/librustc_driver/pretty.rs
@@ -805,8 +805,7 @@ pub fn print_after_hir_lowering<'tcx>(
src_name,
&mut rdr,
box out,
annotation.pp_ann(),
true)
annotation.pp_ann())
})
}

@@ -829,8 +828,7 @@
src_name,
&mut rdr,
box out,
annotation.pp_ann(),
true);
annotation.pp_ann());
for node_id in uii.all_matching_node_ids(hir_map) {
let node = hir_map.get(node_id);
pp_state.print_node(node)?;
16 changes: 8 additions & 8 deletions src/librustdoc/clean/cfg.rs
@@ -414,10 +414,9 @@ impl<'a> fmt::Display for Html<'a> {
mod test {
use super::Cfg;

use syntax::symbol::Symbol;
use syntax::ast::*;
use syntax::source_map::dummy_spanned;
use syntax_pos::DUMMY_SP;
use syntax::ast::*;
use syntax::symbol::Symbol;
use syntax::with_globals;

fn word_cfg(s: &str) -> Cfg {
@@ -592,12 +591,11 @@ mod test {
let mi = dummy_meta_item_word("all");
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));

let node = LitKind::Str(Symbol::intern("done"), StrStyle::Cooked);
let (token, suffix) = node.lit_token();
let mi = MetaItem {
path: Path::from_ident(Ident::from_str("all")),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
Symbol::intern("done"),
StrStyle::Cooked,
))),
node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }),
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
@@ -627,9 +625,11 @@ mod test {
#[test]
fn test_parse_err() {
with_globals(|| {
let node = LitKind::Bool(false);
let (token, suffix) = node.lit_token();
let mi = MetaItem {
path: Path::from_ident(Ident::from_str("foo")),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
node: MetaItemKind::NameValue(Lit { node, token, suffix, span: DUMMY_SP }),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
7 changes: 4 additions & 3 deletions src/libsyntax/attr/mod.rs
@@ -565,8 +565,9 @@ impl MetaItemKind {
Some(TokenTree::Token(_, token::Eq)) => {
tokens.next();
return if let Some(TokenTree::Token(span, token)) = tokens.next() {
LitKind::from_token(token)
.map(|(node, token, suffix)| MetaItemKind::NameValue(Lit { node, token, suffix, span }))
LitKind::from_token(token).map(|(node, token, suffix)| {
MetaItemKind::NameValue(Lit { node, token, suffix, span })
})
} else {
None
};
@@ -635,7 +636,7 @@ impl LitKind {
}
}

pub(crate) fn lit_token(&self) -> (token::Lit, Option<Symbol>) {
pub fn lit_token(&self) -> (token::Lit, Option<Symbol>) {
use std::ascii;

match *self {
29 changes: 3 additions & 26 deletions src/libsyntax/parse/lexer/comments.rs
@@ -3,8 +3,7 @@ pub use CommentStyle::*;
use crate::ast;
use crate::source_map::SourceMap;
use crate::parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
use crate::parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
use crate::print::pprust;
use crate::parse::lexer::{self, ParseSess, StringReader};

use syntax_pos::{BytePos, CharPos, Pos, FileName};
use log::debug;
@@ -339,16 +338,9 @@ fn consume_comment(rdr: &mut StringReader<'_>,
debug!("<<< consume comment");
}

#[derive(Clone)]
pub struct Literal {
pub lit: String,
pub pos: BytePos,
}

// it appears this function is called only from pprust... that's
// probably not a good thing.
pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut dyn Read)
-> (Vec<Comment>, Vec<Literal>)
pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) -> Vec<Comment>
{
let mut src = String::new();
srdr.read_to_string(&mut src).unwrap();
@@ -357,7 +349,6 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);

let mut comments: Vec<Comment> = Vec::new();
let mut literals: Vec<Literal> = Vec::new();
let mut code_to_the_left = false; // Only code
let mut anything_to_the_left = false; // Code or comments

@@ -382,26 +373,12 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
}
}

let bstart = rdr.pos;
rdr.next_token();
// discard, and look ahead; we're working with internal state
let TokenAndSpan { tok, sp } = rdr.peek();
if tok.is_lit() {
rdr.with_str_from(bstart, |s| {
debug!("tok lit: {}", s);
literals.push(Literal {
lit: s.to_string(),
pos: sp.lo(),
});
})
} else {
debug!("tok: {}", pprust::token_to_string(&tok));
}
code_to_the_left = true;
anything_to_the_left = true;
}

(comments, literals)
comments
}

#[cfg(test)]
10 changes: 5 additions & 5 deletions src/libsyntax/parse/parser.rs
@@ -2121,11 +2121,11 @@ impl<'a> Parser<'a> {
Applicability::MachineApplicable,
);
err.emit();
return Ok(match float_suffix {
"f32" => (ast::LitKind::Float(val, ast::FloatTy::F32), token::Float(val), suffix),
"f64" => (ast::LitKind::Float(val, ast::FloatTy::F64), token::Float(val), suffix),
_ => (ast::LitKind::FloatUnsuffixed(val), token::Float(val), suffix),
});
return Ok((match float_suffix {
"f32" => ast::LitKind::Float(val, ast::FloatTy::F32),
"f64" => ast::LitKind::Float(val, ast::FloatTy::F64),
_ => ast::LitKind::FloatUnsuffixed(val),
}, token::Float(val), suffix));
} else {
unreachable!();
};
