Skip to content

Commit

Permalink
Omit integer suffix when unnecessary
Browse files Browse the repository at this point in the history
See PR #21378 for context
  • Loading branch information
Alfie John committed Feb 2, 2015
1 parent ca4b967 commit 9683745
Show file tree
Hide file tree
Showing 15 changed files with 142 additions and 142 deletions.
20 changes: 10 additions & 10 deletions src/libsyntax/ast_util.rs
Expand Up @@ -322,21 +322,21 @@ pub fn struct_field_visibility(field: ast::StructField) -> Visibility {
/// Returns the parsing precedence of the given binary operator.
///
/// Higher values bind more tightly; `*`/`/`/`%` bind tightest (11) and
/// `||` binds loosest (1). The `as` cast operator does not appear in
/// this table — see `AS_PREC` (12), which sits just above `*`/`/`/`%`.
pub fn operator_prec(op: ast::BinOp_) -> usize {
    match op {
        // 'as' sits here with 12
        BiMul | BiDiv | BiRem => 11,
        BiAdd | BiSub => 10,
        BiShl | BiShr => 9,
        BiBitAnd => 8,
        BiBitXor => 7,
        BiBitOr => 6,
        BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3,
        BiAnd => 2,
        BiOr => 1
    }
}

/// Precedence of the `as` operator, which is a binary operator
/// not appearing in the prior table.
pub const AS_PREC: usize = 12;

pub fn empty_generics() -> Generics {
Generics {
Expand Down
18 changes: 9 additions & 9 deletions src/libsyntax/codemap.rs
Expand Up @@ -431,7 +431,7 @@ impl CodeMap {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in lo.line - 1us..hi.line as usize {
for i in lo.line - 1..hi.line as usize {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
Expand Down Expand Up @@ -499,10 +499,10 @@ impl CodeMap {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
let mut a = 0us;
let mut a = 0;
let mut b = len;
while b - a > 1us {
let m = (a + b) / 2us;
while b - a > 1 {
let m = (a + b) / 2;
if files[m].start_pos > pos {
b = m;
} else {
Expand Down Expand Up @@ -538,12 +538,12 @@ impl CodeMap {

let files = self.files.borrow();
let f = (*files)[idx].clone();
let mut a = 0us;
let mut a = 0;
{
let lines = f.lines.borrow();
let mut b = lines.len();
while b - a > 1us {
let m = (a + b) / 2us;
while b - a > 1 {
let m = (a + b) / 2;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
Expand All @@ -552,7 +552,7 @@ impl CodeMap {

fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1us; // Line numbers start at 1
let line = a + 1; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
Expand Down Expand Up @@ -763,7 +763,7 @@ mod test {

assert_eq!(file_lines.file.name, "blork.rs");
assert_eq!(file_lines.lines.len(), 1);
assert_eq!(file_lines.lines[0], 1us);
assert_eq!(file_lines.lines[0], 1);
}

#[test]
Expand Down
32 changes: 16 additions & 16 deletions src/libsyntax/diagnostic.rs
Expand Up @@ -25,7 +25,7 @@ use term::WriterWrapper;
use term;

/// maximum number of lines we will print for each error; arbitrary.
static MAX_LINES: usize = 6us;
static MAX_LINES: usize = 6;

#[derive(Clone, Copy)]
pub enum RenderSpan {
Expand Down Expand Up @@ -155,19 +155,19 @@ impl Handler {
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1us);
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0us
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0us => return,
1us => s = "aborting due to previous error".to_string(),
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
Expand Down Expand Up @@ -457,7 +457,7 @@ fn highlight_lines(err: &mut EmitterWriter,
let mut elided = false;
let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
display_lines = &display_lines[0us..MAX_LINES];
display_lines = &display_lines[0..MAX_LINES];
elided = true;
}
// Print the offending lines
Expand All @@ -468,32 +468,32 @@ fn highlight_lines(err: &mut EmitterWriter,
}
}
if elided {
let last_line = display_lines[display_lines.len() - 1us];
let s = format!("{}:{} ", fm.name, last_line + 1us);
let last_line = display_lines[display_lines.len() - 1];
let s = format!("{}:{} ", fm.name, last_line + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}

// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1us {
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0us;
let mut num = (lines.lines[0] + 1us) / 10us;
let mut digits = 0;
let mut num = (lines.lines[0] + 1) / 10;

// how many digits must be indent past?
while num > 0us { num /= 10us; digits += 1us; }
while num > 0 { num /= 10; digits += 1; }

// indent past |name:## | and the 0-offset column location
let left = fm.name.len() + digits + lo.col.to_usize() + 3us;
let left = fm.name.len() + digits + lo.col.to_usize() + 3;
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
let skip = fm.name.len() + digits + 3us;
let skip = fm.name.len() + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0]) {
for pos in 0us..left - skip {
for pos in 0..left - skip {
let cur_char = orig.as_bytes()[pos] as char;
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
Expand All @@ -511,7 +511,7 @@ fn highlight_lines(err: &mut EmitterWriter,
let hi = cm.lookup_char_pos(sp.hi);
if hi.col != lo.col {
// the ^ already takes up one space
let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1us;
let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1;
for _ in 0..num_squigglies {
s.push('~');
}
Expand Down
16 changes: 8 additions & 8 deletions src/libsyntax/ext/deriving/generic/mod.rs
Expand Up @@ -770,7 +770,7 @@ impl<'a> MethodDef<'a> {
let mut raw_fields = Vec::new(); // ~[[fields of self],
// [fields of next Self arg], [etc]]
let mut patterns = Vec::new();
for i in 0us..self_args.len() {
for i in 0..self_args.len() {
let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
let (pat, ident_expr) =
trait_.create_struct_pattern(cx,
Expand Down Expand Up @@ -859,8 +859,8 @@ impl<'a> MethodDef<'a> {
/// (&A2(ref __self_0),
/// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
/// _ => {
/// let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us };
/// let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us };
/// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 };
/// let __arg_1_vi = match *__arg_1 { A1(..) => 0, A2(..) => 1 };
/// false
/// }
/// }
Expand Down Expand Up @@ -904,8 +904,8 @@ impl<'a> MethodDef<'a> {
/// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2
/// ...
/// _ => {
/// let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... };
/// let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... };
/// let __this_vi = match this { Variant1 => 0, Variant2 => 1, ... };
/// let __that_vi = match that { Variant1 => 0, Variant2 => 1, ... };
/// ... // catch-all remainder can inspect above variant index values.
/// }
/// }
Expand Down Expand Up @@ -1067,13 +1067,13 @@ impl<'a> MethodDef<'a> {
//
// ```
// let __self0_vi = match self {
// A => 0us, B(..) => 1us, C(..) => 2us
// A => 0, B(..) => 1, C(..) => 2
// };
// let __self1_vi = match __arg1 {
// A => 0us, B(..) => 1us, C(..) => 2us
// A => 0, B(..) => 1, C(..) => 2
// };
// let __self2_vi = match __arg2 {
// A => 0us, B(..) => 1us, C(..) => 2us
// A => 0, B(..) => 1, C(..) => 2
// };
// ```
let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();
Expand Down
4 changes: 2 additions & 2 deletions src/libsyntax/ext/expand.rs
Expand Up @@ -362,7 +362,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
// in this file.
// Token-tree macros:
MacInvocTT(pth, tts, _) => {
if pth.segments.len() > 1us {
if pth.segments.len() > 1 {
fld.cx.span_err(pth.span,
"expected macro name without module \
separators");
Expand Down Expand Up @@ -931,7 +931,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
},
_ => unreachable!()
};
if pth.segments.len() > 1us {
if pth.segments.len() > 1 {
fld.cx.span_err(pth.span, "expected macro name without module separators");
return DummyResult::raw_pat(span);
}
Expand Down
2 changes: 1 addition & 1 deletion src/libsyntax/ext/quote.rs
Expand Up @@ -709,7 +709,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
// try removing it when enough of them are gone.

let mut p = cx.new_parser_from_tts(tts);
p.quote_depth += 1us;
p.quote_depth += 1;

let cx_expr = p.parse_expr();
if !p.eat(&token::Comma) {
Expand Down
34 changes: 17 additions & 17 deletions src/libsyntax/ext/tt/macro_parser.rs
Expand Up @@ -171,11 +171,11 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
stack: vec![],
top_elts: TtSeq(ms),
sep: sep,
idx: 0us,
idx: 0,
up: None,
matches: matches,
match_lo: 0us,
match_cur: 0us,
match_lo: 0,
match_cur: 0,
match_hi: match_idx_hi,
sp_lo: lo
}
Expand Down Expand Up @@ -238,7 +238,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
}
}
let mut ret_val = HashMap::new();
let mut idx = 0us;
let mut idx = 0;
for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
Expand Down Expand Up @@ -383,7 +383,7 @@ pub fn parse(sess: &ParseSess,
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1us;
new_ei.idx += 1;
//we specifically matched zero repeats.
for idx in ei.match_cur..ei.match_cur + seq.num_captures {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
Expand All @@ -398,7 +398,7 @@ pub fn parse(sess: &ParseSess,
cur_eis.push(box MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
idx: 0us,
idx: 0,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
Expand Down Expand Up @@ -442,20 +442,20 @@ pub fn parse(sess: &ParseSess,

/* error messages here could be improved with links to orig. rules */
if token_name_eq(&tok, &token::Eof) {
if eof_eis.len() == 1us {
if eof_eis.len() == 1 {
let mut v = Vec::new();
for dv in (&mut eof_eis[0]).matches.iter_mut() {
v.push(dv.pop().unwrap());
}
return Success(nameize(sess, ms, &v[]));
} else if eof_eis.len() > 1us {
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
if (bb_eis.len() > 0us && next_eis.len() > 0us)
|| bb_eis.len() > 1us {
if (bb_eis.len() > 0 && next_eis.len() > 0)
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
Expand All @@ -469,12 +469,12 @@ pub fn parse(sess: &ParseSess,
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
nts, next_eis.len()).to_string());
} else if bb_eis.len() == 0us && next_eis.len() == 0us {
} else if bb_eis.len() == 0 && next_eis.len() == 0 {
return Failure(sp, format!("no rules expected the token `{}`",
pprust::token_to_string(&tok)).to_string());
} else if next_eis.len() > 0us {
} else if next_eis.len() > 0 {
/* Now process the next token */
while next_eis.len() > 0us {
while next_eis.len() > 0 {
cur_eis.push(next_eis.pop().unwrap());
}
rdr.next_token();
Expand All @@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess,
let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, span, name_string.get()))));
ei.idx += 1us;
ei.idx += 1;
ei.match_cur += 1;
}
_ => panic!()
Expand All @@ -501,16 +501,16 @@ pub fn parse(sess: &ParseSess,
}
}

assert!(cur_eis.len() > 0us);
assert!(cur_eis.len() > 0);
}
}

pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
match name {
"tt" => {
p.quote_depth += 1us; //but in theory, non-quoted tts might be useful
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
let res = token::NtTT(P(p.parse_token_tree()));
p.quote_depth -= 1us;
p.quote_depth -= 1;
return res;
}
_ => {}
Expand Down
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/transcribe.rs
Expand Up @@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
r.repeat_len.pop();
}
} else { /* repeat */
*r.repeat_idx.last_mut().unwrap() += 1us;
*r.repeat_idx.last_mut().unwrap() += 1;
r.stack.last_mut().unwrap().idx = 0;
match r.stack.last().unwrap().sep.clone() {
Some(tk) => {
Expand Down

0 comments on commit 9683745

Please sign in to comment.