remove unnecessary parentheses from range notation
Jorge Aparicio committed Jan 19, 2015
1 parent 43f2c19 commit 4968485
Showing 38 changed files with 70 additions and 70 deletions.
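The parentheses removed throughout are redundant because the bounds of a range expression bind tighter than `..` itself: `n + 1..` already parses as `(n + 1)..`, and `..v.len() - 1` as `..(v.len() - 1)`. A minimal, standalone sketch (not part of this commit, written against present-day Rust; `v` and `n` are throwaway names) showing that both spellings denote the same slice:

    fn main() {
        let v = vec![1, 2, 3, 4, 5];
        let n = 2usize;

        // Arithmetic and method calls bind tighter than the `..` operator,
        // so each pair below produces exactly the same slice.
        assert_eq!(&v[(n + 1)..], &v[n + 1..]);
        assert_eq!(&v[..(v.len() - 1)], &v[..v.len() - 1]);
        assert_eq!(&v[(n - 1)..(n + 2)], &v[n - 1..n + 2]);
    }

Parentheses that group a bound's own subexpression, such as the `(start+size) as uint` cast kept in src/libserialize/json.rs, stay in place, since dropping them would change how the bound parses.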
2 changes: 1 addition & 1 deletion src/libcollections/bit.rs
@@ -330,7 +330,7 @@ impl Bitv {

if extra_bytes > 0 {
let mut last_word = 0u32;
for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() {
for (i, &byte) in bytes[complete_words*4..].iter().enumerate() {
last_word |= (reverse_bits(byte) as u32) << (i * 8);
}
bitv.storage.push(last_word);
4 changes: 2 additions & 2 deletions src/libcollections/vec.rs
@@ -2158,7 +2158,7 @@ mod tests {
#[should_fail]
fn test_slice_out_of_bounds_1() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
&x[(-1)..];
&x[-1..];
}

#[test]
@@ -2172,7 +2172,7 @@ mod tests {
#[should_fail]
fn test_slice_out_of_bounds_3() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
&x[(-1)..4];
&x[-1..4];
}

#[test]
8 changes: 4 additions & 4 deletions src/libcore/slice.rs
@@ -240,7 +240,7 @@ impl<T> SliceExt for [T] {

#[inline]
fn init(&self) -> &[T] {
&self[..(self.len() - 1)]
&self[..self.len() - 1]
}

#[inline]
@@ -449,7 +449,7 @@ impl<T> SliceExt for [T] {
#[inline]
fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
let (m, n) = (self.len(), needle.len());
m >= n && needle == &self[(m-n)..]
m >= n && needle == &self[m-n..]
}

#[unstable]
@@ -973,7 +973,7 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[..idx]);
self.v = &self.v[(idx + 1)..];
self.v = &self.v[idx + 1..];
ret
}
}
@@ -998,7 +998,7 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[(idx + 1)..]);
let ret = Some(&self.v[idx + 1..]);
self.v = &self.v[..idx];
ret
}
2 changes: 1 addition & 1 deletion src/libcore/str/mod.rs
@@ -1418,7 +1418,7 @@ impl StrExt for str {
#[inline]
fn ends_with(&self, needle: &str) -> bool {
let (m, n) = (self.len(), needle.len());
m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..]
m >= n && needle.as_bytes() == &self.as_bytes()[m-n..]
}

#[inline]
2 changes: 1 addition & 1 deletion src/libcoretest/iter.rs
@@ -585,7 +585,7 @@ fn check_randacc_iter<A, T>(a: T, len: uint) where
fn test_double_ended_flat_map() {
let u = [0u,1];
let v = [5u,6,7,8];
let mut it = u.iter().flat_map(|x| v[(*x)..v.len()].iter());
let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter());
assert_eq!(it.next_back().unwrap(), &8);
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.next_back().unwrap(), &7);
2 changes: 1 addition & 1 deletion src/libgetopts/lib.rs
@@ -893,7 +893,7 @@ fn each_split_within<F>(ss: &str, lim: uint, mut it: F) -> bool where
(B, Cr, UnderLim) => { B }
(B, Cr, OverLim) if (i - last_start + 1) > lim
=> panic!("word starting with {} longer than limit!",
&ss[last_start..(i + 1)]),
&ss[last_start..i + 1]),
(B, Cr, OverLim) => {
*cont = it(&ss[slice_start..last_end]);
slice_start = last_start;
2 changes: 1 addition & 1 deletion src/libregex/parse.rs
@@ -518,7 +518,7 @@ impl<'a> Parser<'a> {
};
self.chari = closer;
let greed = try!(self.get_next_greedy());
let inner = self.chars[(start+1)..closer].iter().cloned()
let inner = self.chars[start+1..closer].iter().cloned()
.collect::<String>();

// Parse the min and max values from the regex.
2 changes: 1 addition & 1 deletion src/librustc/metadata/decoder.rs
@@ -74,7 +74,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option<rbml:
let mut ret = None;
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(elt.data, elt.start, 4) as uint;
if eq_fn(&elt.data[(elt.start + 4) .. elt.end]) {
if eq_fn(&elt.data[elt.start + 4 .. elt.end]) {
ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
false
} else {
2 changes: 1 addition & 1 deletion src/librustc/metadata/tydecode.rs
@@ -734,7 +734,7 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId {
}

let crate_part = &buf[0u..colon_idx];
let def_part = &buf[(colon_idx + 1u)..len];
let def_part = &buf[colon_idx + 1u..len];

let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
Some(cn) => cn as ast::CrateNum,
2 changes: 1 addition & 1 deletion src/librustc/middle/check_match.rs
@@ -927,7 +927,7 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
};
head.map(|mut head| {
head.push_all(&r[..col]);
head.push_all(&r[(col + 1)..]);
head.push_all(&r[col + 1..]);
head
})
}
6 changes: 3 additions & 3 deletions src/librustc/util/ppaux.rs
@@ -542,17 +542,17 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>,
0
};

for t in tps[..(tps.len() - num_defaults)].iter() {
for t in tps[..tps.len() - num_defaults].iter() {
strs.push(ty_to_string(cx, *t))
}

if cx.lang_items.fn_trait_kind(did).is_some() {
format!("{}({}){}",
base,
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
&strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
&strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)'
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
&strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
&strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
} else {
&strs[0][]
},
2 changes: 1 addition & 1 deletion src/librustc_back/sha2.rs
@@ -156,7 +156,7 @@ impl FixedBuffer for FixedBuffer64 {
// While we have at least a full buffer size chunk's worth of data, process that data
// without copying it into the buffer
while input.len() - i >= size {
func(&input[i..(i + size)]);
func(&input[i..i + size]);
i += size;
}

6 changes: 3 additions & 3 deletions src/librustc_resolve/lib.rs
@@ -2082,8 +2082,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
let msg = format!("Could not find `{}` in `{}`",
// idx +- 1 to account for the
// colons on either side
&mpath[(idx + 1)..],
&mpath[..(idx - 1)]);
&mpath[idx + 1..],
&mpath[..idx - 1]);
return Failed(Some((span, msg)));
},
None => {
@@ -2756,7 +2756,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
for (i, rib) in ribs.iter().enumerate().rev() {
match rib.bindings.get(&name).cloned() {
Some(def_like) => {
return self.upvarify(&ribs[(i + 1)..], def_like, span);
return self.upvarify(&ribs[i + 1..], def_like, span);
}
None => {
// Continue.
2 changes: 1 addition & 1 deletion src/librustc_trans/back/link.rs
@@ -1183,7 +1183,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
// against the archive.
if sess.lto() {
let name = cratepath.filename_str().unwrap();
let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib
let name = &name[3..name.len() - 5]; // chop off lib/.rlib
time(sess.time_passes(),
&format!("altering {}.rlib", name)[],
(), |()| {
4 changes: 2 additions & 2 deletions src/librustc_trans/back/lto.rs
@@ -60,7 +60,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,

let archive = ArchiveRO::open(&path).expect("wanted an rlib");
let file = path.filename_str().unwrap();
let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib
let file = &file[3..file.len() - 5]; // chop off lib/.rlib
debug!("reading {}", file);
for i in iter::count(0u, 1) {
let bc_encoded = time(sess.time_passes(),
@@ -201,7 +201,7 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 {
}

fn read_from_le_bytes<T: Int>(bytes: &[u8], position_in_bytes: uint) -> T {
let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::<T>())];
let byte_data = &bytes[position_in_bytes..position_in_bytes + mem::size_of::<T>()];
let data = unsafe {
*(byte_data.as_ptr() as *const T)
};
2 changes: 1 addition & 1 deletion src/librustc_trans/save/mod.rs
@@ -186,7 +186,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
if len <= 2 {
return;
}
let sub_paths = &sub_paths[..(len-2)];
let sub_paths = &sub_paths[..len-2];
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
4 changes: 2 additions & 2 deletions src/librustc_trans/trans/_match.rs
@@ -472,7 +472,7 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
enter_match(bcx, dm, m, col, val, |pats| {
if pat_is_binding_or_wild(dm, &*pats[col]) {
let mut r = pats[..col].to_vec();
r.push_all(&pats[(col + 1)..]);
r.push_all(&pats[col + 1..]);
Some(r)
} else {
None
@@ -983,7 +983,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let dm = &tcx.def_map;

let mut vals_left = vals[0u..col].to_vec();
vals_left.push_all(&vals[(col + 1u)..]);
vals_left.push_all(&vals[col + 1u..]);
let ccx = bcx.fcx.ccx;

// Find a real id (we're adding placeholder wildcard patterns, but
2 changes: 1 addition & 1 deletion src/librustc_trans/trans/cabi_x86_64.rs
@@ -361,7 +361,7 @@ fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type {
}
_ => unreachable!(),
};
let vec_len = llvec_len(&cls[(i + 1u)..]);
let vec_len = llvec_len(&cls[i + 1u..]);
let vec_ty = Type::vector(&elt_ty, vec_len as u64 * elts_per_word);
tys.push(vec_ty);
i += vec_len;
4 changes: 2 additions & 2 deletions src/librustc_trans/trans/debuginfo.rs
@@ -1163,7 +1163,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
if let Some(code_snippet) = code_snippet {
let bytes = code_snippet.as_bytes();

if bytes.len() > 0 && &bytes[(bytes.len()-1)..] == b"}" {
if bytes.len() > 0 && &bytes[bytes.len()-1..] == b"}" {
cleanup_span = Span {
lo: node_span.hi - codemap::BytePos(1),
hi: node_span.hi,
@@ -1752,7 +1752,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
let work_dir = cx.sess().working_dir.as_str().unwrap();
let file_name =
if full_path.starts_with(work_dir) {
&full_path[(work_dir.len() + 1u)..full_path.len()]
&full_path[work_dir.len() + 1u..full_path.len()]
} else {
full_path
};
2 changes: 1 addition & 1 deletion src/librustdoc/html/format.rs
@@ -358,7 +358,7 @@ fn path<F, G>(w: &mut fmt::Formatter,
// This is a documented path, link to it!
Some((ref fqp, shortty)) if abs_root.is_some() => {
let mut url = String::from_str(abs_root.unwrap().as_slice());
let to_link = &fqp[..(fqp.len() - 1)];
let to_link = &fqp[..fqp.len() - 1];
for component in to_link.iter() {
url.push_str(component.as_slice());
url.push_str("/");
12 changes: 6 additions & 6 deletions src/librustdoc/html/render.rs
@@ -404,7 +404,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::IoResult<String>
search_index.push(IndexItem {
ty: shortty(item),
name: item.name.clone().unwrap(),
path: fqp[..(fqp.len() - 1)].connect("::"),
path: fqp[..fqp.len() - 1].connect("::"),
desc: shorter(item.doc_value()).to_string(),
parent: Some(did),
});
@@ -559,7 +559,7 @@ fn write_shared(cx: &Context,
};

let mut mydst = dst.clone();
for part in remote_path[..(remote_path.len() - 1)].iter() {
for part in remote_path[..remote_path.len() - 1].iter() {
mydst.push(part.as_slice());
try!(mkdir(&mydst));
}
@@ -842,7 +842,7 @@ impl DocFolder for Cache {
clean::StructFieldItem(..) |
clean::VariantItem(..) => {
((Some(*self.parent_stack.last().unwrap()),
Some(&self.stack[..(self.stack.len() - 1)])),
Some(&self.stack[..self.stack.len() - 1])),
false)
}
clean::MethodItem(..) => {
@@ -853,13 +853,13 @@ impl DocFolder for Cache {
let did = *last;
let path = match self.paths.get(&did) {
Some(&(_, ItemType::Trait)) =>
Some(&self.stack[..(self.stack.len() - 1)]),
Some(&self.stack[..self.stack.len() - 1]),
// The current stack not necessarily has correlation for
// where the type was defined. On the other hand,
// `paths` always has the right information if present.
Some(&(ref fqp, ItemType::Struct)) |
Some(&(ref fqp, ItemType::Enum)) =>
Some(&fqp[..(fqp.len() - 1)]),
Some(&fqp[..fqp.len() - 1]),
Some(..) => Some(self.stack.as_slice()),
None => None
};
@@ -1185,7 +1185,7 @@ impl Context {
.collect::<String>();
match cache().paths.get(&it.def_id) {
Some(&(ref names, _)) => {
for name in (&names[..(names.len() - 1)]).iter() {
for name in (&names[..names.len() - 1]).iter() {
url.push_str(name.as_slice());
url.push_str("/");
}
4 changes: 2 additions & 2 deletions src/libserialize/json.rs
@@ -1222,7 +1222,7 @@ impl Stack {
InternalIndex(i) => StackElement::Index(i),
InternalKey(start, size) => {
StackElement::Key(str::from_utf8(
&self.str_buffer[(start as uint) .. (start as uint + size as uint)])
&self.str_buffer[start as uint .. start as uint + size as uint])
.unwrap())
}
}
@@ -1265,7 +1265,7 @@ impl Stack {
Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
Some(&InternalKey(start, size)) => {
Some(StackElement::Key(str::from_utf8(
&self.str_buffer[(start as uint) .. (start+size) as uint]
&self.str_buffer[start as uint .. (start+size) as uint]
).unwrap()))
}
}
4 changes: 2 additions & 2 deletions src/libstd/io/buffered.rs
@@ -281,9 +281,9 @@ impl<W: Writer> Writer for LineBufferedWriter<W> {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
match buf.iter().rposition(|&b| b == b'\n') {
Some(i) => {
try!(self.inner.write(&buf[..(i + 1)]));
try!(self.inner.write(&buf[..i + 1]));
try!(self.inner.flush());
try!(self.inner.write(&buf[(i + 1)..]));
try!(self.inner.write(&buf[i + 1..]));
Ok(())
}
None => self.inner.write(buf),
4 changes: 2 additions & 2 deletions src/libstd/io/mem.rs
@@ -159,7 +159,7 @@ impl Reader for MemReader {

let write_len = min(buf.len(), self.buf.len() - self.pos);
{
let input = &self.buf[self.pos.. (self.pos + write_len)];
let input = &self.buf[self.pos.. self.pos + write_len];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
@@ -349,7 +349,7 @@ impl<'a> Reader for BufReader<'a> {

let write_len = min(buf.len(), self.buf.len() - self.pos);
{
let input = &self.buf[self.pos.. (self.pos + write_len)];
let input = &self.buf[self.pos.. self.pos + write_len];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
2 changes: 1 addition & 1 deletion src/libstd/io/mod.rs
@@ -1449,7 +1449,7 @@ pub trait Buffer: Reader {
};
match available.iter().position(|&b| b == byte) {
Some(i) => {
res.push_all(&available[..(i + 1)]);
res.push_all(&available[..i + 1]);
used = i + 1;
break
}
2 changes: 1 addition & 1 deletion src/libstd/path/mod.rs
@@ -399,7 +399,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe {
match name.rposition_elem(&dot) {
None | Some(0) => None,
Some(1) if name == b".." => None,
Some(pos) => Some(&name[(pos+1)..])
Some(pos) => Some(&name[pos+1..])
}
}
}
