Skip to content

Commit

Permalink
Cosmetic improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
Alexander Regueiro committed Jan 13, 2019
1 parent 2cf736f commit 88336ea
Show file tree
Hide file tree
Showing 21 changed files with 144 additions and 144 deletions.
29 changes: 14 additions & 15 deletions src/libcore/slice/memchr.rs
@@ -1,5 +1,4 @@
//
// Original implementation taken from rust-memchr
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch

use cmp;
Expand All @@ -8,13 +7,13 @@ use mem;
const LO_U64: u64 = 0x0101010101010101;
const HI_U64: u64 = 0x8080808080808080;

// use truncation
// Use truncation.
const LO_USIZE: usize = LO_U64 as usize;
const HI_USIZE: usize = HI_U64 as usize;

/// Return `true` if `x` contains any zero byte.
/// Returns whether `x` contains any zero byte.
///
/// From *Matters Computational*, J. Arndt
/// From *Matters Computational*, J. Arndt:
///
/// "The idea is to subtract one from each of the bytes and then look for
/// bytes where the borrow propagated all the way to the most significant
Expand All @@ -36,7 +35,7 @@ fn repeat_byte(b: u8) -> usize {
(b as usize) * (::usize::MAX / 255)
}

/// Return the first index matching the byte `x` in `text`.
/// Returns the first index matching the byte `x` in `text`.
pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {
// Scan for a single byte value by reading two `usize` words at a time.
//
Expand Down Expand Up @@ -77,18 +76,18 @@ pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {
}
}

// find the byte after the point the body loop stopped
// Find the byte after the point the body loop stopped.
text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)
}

/// Return the last index matching the byte `x` in `text`.
/// Returns the last index matching the byte `x` in `text`.
pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
// - unaligned tail, after the last word aligned address in text
// - body, scan by 2 words at a time
// - the first remaining bytes, < 2 word size
// Split `text` in three parts:
// - unaligned tail, after the last word aligned address in text,
// - body, scanned by 2 words at a time,
// - the first remaining bytes, < 2 word size.
let len = text.len();
let ptr = text.as_ptr();
type Chunk = usize;
Expand All @@ -105,7 +104,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
return Some(offset + index);
}

// search the body of the text, make sure we don't cross min_aligned_offset.
// Search the body of the text, make sure we don't cross min_aligned_offset.
// offset is always aligned, so just testing `>` is sufficient and avoids possible
// overflow.
let repeated_x = repeat_byte(x);
Expand All @@ -116,7 +115,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
let u = *(ptr.offset(offset as isize - 2 * chunk_bytes as isize) as *const Chunk);
let v = *(ptr.offset(offset as isize - chunk_bytes as isize) as *const Chunk);

// break if there is a matching byte
// Break if there is a matching byte.
let zu = contains_zero_byte(u ^ repeated_x);
let zv = contains_zero_byte(v ^ repeated_x);
if zu || zv {
Expand All @@ -126,6 +125,6 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
offset -= 2 * chunk_bytes;
}

// find the byte before the point the body loop stopped
// Find the byte before the point the body loop stopped.
text[..offset].iter().rposition(|elt| *elt == x)
}
2 changes: 1 addition & 1 deletion src/libserialize/json.rs
@@ -1,4 +1,4 @@
// Rust JSON serialization library
// Rust JSON serialization library.
// Copyright (c) 2011 Google Inc.

#![forbid(non_camel_case_types)]
Expand Down
3 changes: 1 addition & 2 deletions src/libstd/memchr.rs
@@ -1,5 +1,4 @@
//
// Original implementation taken from rust-memchr
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch

/// A safe interface to `memchr`.
Expand Down
2 changes: 1 addition & 1 deletion src/libstd/sys/cloudabi/abi/cloudabi.rs
@@ -1,4 +1,4 @@
// Copyright (c) 2016-2017 Nuxi (https://nuxi.nl/) and contributors.
// Copyright (c) 2016-2017 Nuxi <https://nuxi.nl/> and contributors.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
Expand Down
3 changes: 1 addition & 2 deletions src/libstd/sys/redox/memchr.rs
@@ -1,5 +1,4 @@
//
// Original implementation taken from rust-memchr
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch

pub use core::slice::memchr::{memchr, memrchr};
3 changes: 1 addition & 2 deletions src/libstd/sys/unix/memchr.rs
@@ -1,5 +1,4 @@
//
// Original implementation taken from rust-memchr
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch

pub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> {
Expand Down
5 changes: 2 additions & 3 deletions src/libstd/sys/windows/memchr.rs
@@ -1,6 +1,5 @@
//
// Original implementation taken from rust-memchr
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch

// Fallback memchr is fastest on windows
// Fallback memchr is fastest on Windows.
pub use core::slice::memchr::{memchr, memrchr};
2 changes: 1 addition & 1 deletion src/libsyntax/json.rs
Expand Up @@ -7,7 +7,7 @@
//! The format of the JSON output should be considered *unstable*. For now the
//! structs at the end of this file (Diagnostic*) specify the error format.

// FIXME spec the JSON output properly.
// FIXME: spec the JSON output properly.

use source_map::{SourceMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
Expand Down
3 changes: 0 additions & 3 deletions src/test/rustdoc/auxiliary/enum_primitive.rs
Expand Up @@ -19,7 +19,6 @@
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


//! This crate exports a macro `enum_from_primitive!` that wraps an
//! `enum` declaration and automatically adds an implementation of
//! `num::FromPrimitive` (reexported here), to allow conversion from
Expand Down Expand Up @@ -52,7 +51,6 @@
//! }
//! ```


pub mod num_traits {
pub trait FromPrimitive: Sized {
fn from_i64(n: i64) -> Option<Self>;
Expand Down Expand Up @@ -207,4 +205,3 @@ macro_rules! enum_from_primitive {
enum_from_primitive_impl! { $name, $( $( $variant )+ )+ }
};
}

6 changes: 3 additions & 3 deletions src/tools/tidy/src/bins.rs
Expand Up @@ -2,12 +2,12 @@
//! by accident.
//!
//! In the past we've accidentally checked in test binaries and such which add a
//! huge amount of bloat to the git history, so it's good to just ensure we
//! don't do that again :)
//! huge amount of bloat to the Git history, so it's good to just ensure we
//! don't do that again.

use std::path::Path;

// All files are executable on Windows, so just check on Unix
// All files are executable on Windows, so just check on Unix.
#[cfg(windows)]
pub fn check(_path: &Path, _bad: &mut bool) {}

Expand Down
26 changes: 14 additions & 12 deletions src/tools/tidy/src/cargo.rs
Expand Up @@ -13,7 +13,7 @@ pub fn check(path: &Path, bad: &mut bool) {
return
}
for entry in t!(path.read_dir(), path).map(|e| t!(e)) {
// Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs`
// Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs`.
if entry.file_name().to_str() == Some("Cargo.toml") {
if path.join("src/lib.rs").is_file() {
verify(&entry.path(), &path.join("src/lib.rs"), bad)
Expand All @@ -27,8 +27,8 @@ pub fn check(path: &Path, bad: &mut bool) {
}
}

// Verify that the dependencies in Cargo.toml at `tomlfile` are sync'd with the
// `extern crate` annotations in the lib.rs at `libfile`.
/// Verifies that the dependencies in Cargo.toml at `tomlfile` are synced with
/// the `extern crate` annotations in the lib.rs at `libfile`.
fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
let toml = t!(fs::read_to_string(&tomlfile));
let librs = t!(fs::read_to_string(&libfile));
Expand All @@ -37,14 +37,16 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
return
}

// "Poor man's TOML parser", just assume we use one syntax for now
// "Poor man's TOML parser" -- just assume we use one syntax for now.
//
// We just look for:
//
// [dependencies]
// name = ...
// name2 = ...
// name3 = ...
// ```
// [dependencies]
// name = ...
// name2 = ...
// name3 = ...
// ```
//
// If we encounter a line starting with `[` then we assume it's the end of
// the dependency section and bail out.
Expand All @@ -63,14 +65,14 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
continue
}

// Don't worry about depending on core/std but not saying `extern crate
// core/std`, that's intentional.
// Don't worry about depending on core/std while not writing `extern crate
// core/std` -- that's intentional.
if krate == "core" || krate == "std" {
continue
}

// This is intentional, this dependency just makes the crate available
// for others later on. Cover cases
// This is intentional -- this dependency just makes the crate available
// for others later on.
let whitelisted = krate.starts_with("panic");
if toml.contains("name = \"std\"") && whitelisted {
continue
Expand Down
53 changes: 27 additions & 26 deletions src/tools/tidy/src/deps.rs
@@ -1,4 +1,4 @@
//! Check license of third-party deps by inspecting vendor
//! Checks the licenses of third-party dependencies by inspecting vendors.

use std::collections::{BTreeSet, HashSet, HashMap};
use std::fs;
Expand All @@ -21,7 +21,7 @@ const LICENSES: &[&str] = &[
/// These are exceptions to Rust's permissive licensing policy, and
/// should be considered bugs. Exceptions are only allowed in Rust
/// tooling. It is _crucial_ that no exception crates be dependencies
/// of the Rust runtime (std / test).
/// of the Rust runtime (std/test).
const EXCEPTIONS: &[&str] = &[
"mdbook", // MPL2, mdbook
"openssl", // BSD+advertising clause, cargo, mdbook
Expand All @@ -39,11 +39,11 @@ const EXCEPTIONS: &[&str] = &[
"colored", // MPL-2.0, rustfmt
"ordslice", // Apache-2.0, rls
"cloudabi", // BSD-2-Clause, (rls -> crossbeam-channel 0.2 -> rand 0.5)
"ryu", // Apache-2.0, rls/cargo/... (b/c of serde)
"ryu", // Apache-2.0, rls/cargo/... (because of serde)
"bytesize", // Apache-2.0, cargo
"im-rc", // MPL-2.0+, cargo
"adler32", // BSD-3-Clause AND Zlib, cargo dep that isn't used
"fortanix-sgx-abi", // MPL-2.0+, libstd but only for sgx target
"fortanix-sgx-abi", // MPL-2.0+, libstd but only for `sgx` target
];

/// Which crates to check against the whitelist?
Expand Down Expand Up @@ -156,7 +156,7 @@ const WHITELIST: &[Crate] = &[
Crate("wincolor"),
];

// Some types for Serde to deserialize the output of `cargo metadata` to...
// Some types for Serde to deserialize the output of `cargo metadata` to.

#[derive(Deserialize)]
struct Output {
Expand All @@ -174,9 +174,9 @@ struct ResolveNode {
dependencies: Vec<String>,
}

/// A unique identifier for a crate
/// A unique identifier for a crate.
#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
struct Crate<'a>(&'a str); // (name,)
struct Crate<'a>(&'a str); // (name)

#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
struct CrateVersion<'a>(&'a str, &'a str); // (name, version)
Expand All @@ -188,7 +188,7 @@ impl<'a> Crate<'a> {
}

impl<'a> CrateVersion<'a> {
/// Returns the struct and whether or not the dep is in-tree
/// Returns the struct and whether or not the dependency is in-tree.
pub fn from_str(s: &'a str) -> (Self, bool) {
let mut parts = s.split(' ');
let name = parts.next().unwrap();
Expand All @@ -215,15 +215,15 @@ impl<'a> From<CrateVersion<'a>> for Crate<'a> {
///
/// Specifically, this checks that the license is correct.
pub fn check(path: &Path, bad: &mut bool) {
// Check licences
// Check licenses.
let path = path.join("../vendor");
assert!(path.exists(), "vendor directory missing");
let mut saw_dir = false;
for dir in t!(path.read_dir()) {
saw_dir = true;
let dir = t!(dir);

// skip our exceptions
// Skip our exceptions.
let is_exception = EXCEPTIONS.iter().any(|exception| {
dir.path()
.to_str()
Expand All @@ -240,18 +240,18 @@ pub fn check(path: &Path, bad: &mut bool) {
assert!(saw_dir, "no vendored source");
}

/// Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check
/// failed.
/// Checks the dependency of `WHITELIST_CRATES` at the given path. Changes `bad` to `true` if a
/// check failed.
///
/// Specifically, this checks that the dependencies are on the WHITELIST.
/// Specifically, this checks that the dependencies are on the `WHITELIST`.
pub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {
// Get dependencies from cargo metadata
// Get dependencies from Cargo metadata.
let resolve = get_deps(path, cargo);

// Get the whitelist into a convenient form
// Get the whitelist in a convenient form.
let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();

// Check dependencies
// Check dependencies.
let mut visited = BTreeSet::new();
let mut unapproved = BTreeSet::new();
for &krate in WHITELIST_CRATES.iter() {
Expand Down Expand Up @@ -308,9 +308,9 @@ fn extract_license(line: &str) -> String {
}
}

/// Get the dependencies of the crate at the given path using `cargo metadata`.
/// Gets the dependencies of the crate at the given path using `cargo metadata`.
fn get_deps(path: &Path, cargo: &Path) -> Resolve {
// Run `cargo metadata` to get the set of dependencies
// Run `cargo metadata` to get the set of dependencies.
let output = Command::new(cargo)
.arg("metadata")
.arg("--format-version")
Expand All @@ -335,25 +335,25 @@ fn check_crate_whitelist<'a, 'b>(
krate: CrateVersion<'a>,
must_be_on_whitelist: bool,
) -> BTreeSet<Crate<'a>> {
// Will contain bad deps
// This will contain bad deps.
let mut unapproved = BTreeSet::new();

// Check if we have already visited this crate
// Check if we have already visited this crate.
if visited.contains(&krate) {
return unapproved;
}

visited.insert(krate);

// If this path is in-tree, we don't require it to be on the whitelist
// If this path is in-tree, we don't require it to be on the whitelist.
if must_be_on_whitelist {
// If this dependency is not on the WHITELIST, add to bad set
// If this dependency is not on `WHITELIST`, add to bad set.
if !whitelist.contains(&krate.into()) {
unapproved.insert(krate.into());
}
}

// Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)
// Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!).
let to_check = resolve
.nodes
.iter()
Expand All @@ -372,9 +372,10 @@ fn check_crate_whitelist<'a, 'b>(

fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) {
const FORBIDDEN_TO_HAVE_DUPLICATES: &[&str] = &[
// These two crates take quite a long time to build, let's not let two
// versions of them accidentally sneak into our dependency graph to
// ensure we keep our CI times under control
// These two crates take quite a long time to build, so don't allow two versions of them
// to accidentally sneak into our dependency graph, in order to ensure we keep our CI times
// under control.

// "cargo", // FIXME(#53005)
"rustc-ap-syntax",
];
Expand Down

0 comments on commit 88336ea

Please sign in to comment.