--- file diff (path not shown) ---
@@ -1,6 +1,7 @@
use std::any::{Any, TypeId};
use std::borrow::Borrow;
use std::cell::RefCell;
use std::cmp::{Ord, Ordering, PartialOrd};
use std::collections::HashMap;
use std::convert::AsRef;
use std::ffi::OsStr;
@@ -11,7 +12,6 @@ use std::mem;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::cmp::{PartialOrd, Ord, Ordering};

use crate::builder::Step;

@@ -45,7 +45,7 @@ impl<T> Eq for Interned<T> {}

impl PartialEq<str> for Interned<String> {
fn eq(&self, other: &str) -> bool {
*self == other
*self == other
}
}
impl<'a> PartialEq<&'a str> for Interned<String> {
@@ -176,14 +176,14 @@ impl<T: Hash + Clone + Eq> Default for TyIntern<T> {
impl<T: Hash + Clone + Eq> TyIntern<T> {
fn intern_borrow<B>(&mut self, item: &B) -> Interned<T>
where
B: Eq + Hash + ToOwned<Owned=T> + ?Sized,
B: Eq + Hash + ToOwned<Owned = T> + ?Sized,
T: Borrow<B>,
{
if let Some(i) = self.set.get(&item) {
return *i;
}
let item = item.to_owned();
let interned = Interned(self.items.len(), PhantomData::<*const T>);
let interned = Interned(self.items.len(), PhantomData::<*const T>);
self.set.insert(item.clone(), interned);
self.items.push(item);
interned
@@ -193,7 +193,7 @@ impl<T: Hash + Clone + Eq> TyIntern<T> {
if let Some(i) = self.set.get(&item) {
return *i;
}
let interned = Interned(self.items.len(), PhantomData::<*const T>);
let interned = Interned(self.items.len(), PhantomData::<*const T>);
self.set.insert(item.clone(), interned);
self.items.push(item);
interned
@@ -233,10 +233,12 @@ lazy_static! {
/// get() method.
#[derive(Debug)]
pub struct Cache(
RefCell<HashMap<
TypeId,
Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
>>
RefCell<
HashMap<
TypeId,
Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
>,
>,
);

impl Cache {
@@ -247,29 +249,36 @@ impl Cache {
pub fn put<S: Step>(&self, step: S, value: S::Output) {
let mut cache = self.0.borrow_mut();
let type_id = TypeId::of::<S>();
let stepcache = cache.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
assert!(!stepcache.contains_key(&step), "processing {:?} a second time", step);
let stepcache = cache
.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
assert!(
!stepcache.contains_key(&step),
"processing {:?} a second time",
step
);
stepcache.insert(step, value);
}

pub fn get<S: Step>(&self, step: &S) -> Option<S::Output> {
let mut cache = self.0.borrow_mut();
let type_id = TypeId::of::<S>();
let stepcache = cache.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
let stepcache = cache
.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
stepcache.get(step).cloned()
}

#[cfg(test)]
pub fn all<S: Ord + Copy + Step>(&mut self) -> Vec<(S, S::Output)> {
let cache = self.0.get_mut();
let type_id = TypeId::of::<S>();
let mut v = cache.remove(&type_id)
let mut v = cache
.remove(&type_id)
.map(|b| b.downcast::<HashMap<S, S::Output>>().expect("correct type"))
.map(|m| m.into_iter().collect::<Vec<_>>())
.unwrap_or_default();
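As background for the Cache type being reformatted above: it is one RefCell'd map keyed by TypeId, where each value is really a HashMap<S, S::Output> for a concrete step type S, stored behind Box<dyn Any> and recovered with downcast_mut. A self-contained sketch of that pattern follows, with a simplified stand-in Step trait and a hypothetical Libstd step (neither is the real bootstrap definition):

use std::any::{Any, TypeId};
use std::cell::RefCell;
use std::collections::HashMap;

// Simplified stand-in for the bootstrap `Step` trait: each step type has an
// associated, cloneable output.
trait Step: 'static + Sized + std::hash::Hash + Eq + Clone {
    type Output: Clone + 'static;
}

// One outer map keyed by TypeId; each value is really a HashMap<S, S::Output>
// for the concrete step type S, stored as `Box<dyn Any>`.
struct Cache(RefCell<HashMap<TypeId, Box<dyn Any>>>);

impl Cache {
    fn new() -> Cache {
        Cache(RefCell::new(HashMap::new()))
    }

    fn put<S: Step>(&self, step: S, value: S::Output) {
        let mut cache = self.0.borrow_mut();
        let stepcache = cache
            .entry(TypeId::of::<S>())
            .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
            .downcast_mut::<HashMap<S, S::Output>>()
            .expect("invalid type mapped");
        stepcache.insert(step, value);
    }

    fn get<S: Step>(&self, step: &S) -> Option<S::Output> {
        let mut cache = self.0.borrow_mut();
        cache
            .entry(TypeId::of::<S>())
            .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
            .downcast_mut::<HashMap<S, S::Output>>()
            .expect("invalid type mapped")
            .get(step)
            .cloned()
    }
}

// Hypothetical step used only for this illustration.
#[derive(Hash, PartialEq, Eq, Clone)]
struct Libstd { stage: u32 }
impl Step for Libstd { type Output = String; }

fn main() {
    let cache = Cache::new();
    cache.put(Libstd { stage: 1 }, "build/stage1-std".to_string());
    assert_eq!(cache.get(&Libstd { stage: 1 }), Some("build/stage1-std".to_string()));
}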
--- file diff (path not shown) ---
@@ -22,16 +22,16 @@
//! everything.

use std::collections::HashSet;
use std::{env, iter};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, iter};

use build_helper::output;
use cc;

use crate::{Build, GitRepo};
use crate::config::Target;
use crate::cache::Interned;
use crate::config::Target;
use crate::{Build, GitRepo};

// The `cc` crate doesn't provide a way to obtain a path to the detected archiver,
// so use some simplified logic here. First we respect the environment variable `AR`, then
@@ -63,14 +63,25 @@ fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
pub fn find(build: &mut Build) {
// For all targets we're going to need a C compiler for building some shims
// and such as well as for being a linker for Rust code.
let targets = build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build))
.collect::<HashSet<_>>();
let targets = build
.targets
.iter()
.chain(&build.hosts)
.cloned()
.chain(iter::once(build.build))
.collect::<HashSet<_>>();
for target in targets.into_iter() {
let mut cfg = cc::Build::new();
cfg.cargo_metadata(false).opt_level(2).warnings(false).debug(false)
.target(&target).host(&build.build);
cfg.cargo_metadata(false)
.opt_level(2)
.warnings(false)
.debug(false)
.target(&target)
.host(&build.build);
match build.crt_static(target) {
Some(a) => { cfg.static_crt(a); }
Some(a) => {
cfg.static_crt(a);
}
None => {
if target.contains("msvc") {
cfg.static_crt(true);
@@ -97,19 +108,33 @@ pub fn find(build: &mut Build) {

build.cc.insert(target, compiler);
build.verbose(&format!("CC_{} = {:?}", &target, build.cc(target)));
build.verbose(&format!("CFLAGS_{} = {:?}", &target, build.cflags(target, GitRepo::Rustc)));
build.verbose(&format!(
"CFLAGS_{} = {:?}",
&target,
build.cflags(target, GitRepo::Rustc)
));
if let Some(ar) = ar {
build.verbose(&format!("AR_{} = {:?}", &target, ar));
build.ar.insert(target, ar);
}
}

// For all host triples we need to find a C++ compiler as well
let hosts = build.hosts.iter().cloned().chain(iter::once(build.build)).collect::<HashSet<_>>();
let hosts = build
.hosts
.iter()
.cloned()
.chain(iter::once(build.build))
.collect::<HashSet<_>>();
for host in hosts.into_iter() {
let mut cfg = cc::Build::new();
cfg.cargo_metadata(false).opt_level(2).warnings(false).debug(false).cpp(true)
.target(&host).host(&build.build);
cfg.cargo_metadata(false)
.opt_level(2)
.warnings(false)
.debug(false)
.cpp(true)
.target(&host)
.host(&build.build);
let config = build.config.target_config.get(&host);
if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
cfg.compiler(cxx);
@@ -122,21 +147,24 @@ pub fn find(build: &mut Build) {
}
}

fn set_compiler(cfg: &mut cc::Build,
compiler: Language,
target: Interned<String>,
config: Option<&Target>,
build: &Build) {
fn set_compiler(
cfg: &mut cc::Build,
compiler: Language,
target: Interned<String>,
config: Option<&Target>,
build: &Build,
) {
match &*target {
// When compiling for android we may have the NDK configured in the
// config.toml in which case we look there. Otherwise the default
// compiler already takes into account the triple in question.
t if t.contains("android") => {
if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
let target = target.replace("armv7neon", "arm")
.replace("armv7", "arm")
.replace("thumbv7neon", "arm")
.replace("thumbv7", "arm");
let target = target
.replace("armv7neon", "arm")
.replace("armv7", "arm")
.replace("thumbv7neon", "arm")
.replace("thumbv7", "arm");
let compiler = format!("{}-{}", target, compiler.clang());
cfg.compiler(ndk.join("bin").join(compiler));
}
@@ -148,7 +176,7 @@ fn set_compiler(cfg: &mut cc::Build,
let c = cfg.get_compiler();
let gnu_compiler = compiler.gcc();
if !c.path().ends_with(gnu_compiler) {
return
return;
}

let output = output(c.to_command().arg("--version"));
@@ -157,7 +185,7 @@ fn set_compiler(cfg: &mut cc::Build,
None => return,
};
match output[i + 3..].chars().next().unwrap() {
'0' ... '6' => {}
'0'...'6' => {}
_ => return,
}
let alternative = format!("e{}", gnu_compiler);
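The comment above cc2ar describes the archiver fallback: respect the AR environment variable first, then derive a name from the detected C compiler. A rough, illustrative sketch of that logic is below; the target special-cases and name heuristics here are simplified assumptions, not the real list used by bootstrap.

use std::env;
use std::path::{Path, PathBuf};

// Honor $AR if set, special-case MSVC, otherwise derive the archiver name
// from the C compiler's file name (gcc -> ar, <triple>-gcc -> <triple>-ar).
fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
    if let Some(ar) = env::var_os("AR") {
        return Some(PathBuf::from(ar));
    }
    if target.contains("msvc") {
        // MSVC drives lib.exe through the cc crate; no separate `ar` needed.
        return None;
    }
    let file = cc.file_name()?.to_str()?;
    let ar = if let Some(prefix) = file.strip_suffix("-gcc") {
        // e.g. arm-linux-gnueabihf-gcc -> arm-linux-gnueabihf-ar
        format!("{}-ar", prefix)
    } else {
        "ar".to_string()
    };
    Some(cc.parent().unwrap_or_else(|| Path::new(".")).join(ar))
}

fn main() {
    let ar = cc2ar(
        Path::new("/usr/bin/arm-linux-gnueabihf-gcc"),
        "arm-unknown-linux-gnueabihf",
    );
    println!("{:?}", ar);
}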
--- file diff (path not shown) ---
@@ -10,8 +10,8 @@ use std::process::Command;

use build_helper::output;

use crate::Build;
use crate::config::Config;
use crate::Build;

// The version number
pub const CFG_RELEASE_NUM: &str = "1.33.0";
@@ -30,31 +30,41 @@ impl GitInfo {
pub fn new(config: &Config, dir: &Path) -> GitInfo {
// See if this even begins to look like a git dir
if config.ignore_git || !dir.join(".git").exists() {
return GitInfo { inner: None }
return GitInfo { inner: None };
}

// Make sure git commands work
let out = Command::new("git")
.arg("rev-parse")
.current_dir(dir)
.output()
.expect("failed to spawn git");
.arg("rev-parse")
.current_dir(dir)
.output()
.expect("failed to spawn git");
if !out.status.success() {
return GitInfo { inner: None }
return GitInfo { inner: None };
}

// Ok, let's scrape some info
let ver_date = output(Command::new("git").current_dir(dir)
.arg("log").arg("-1")
.arg("--date=short")
.arg("--pretty=format:%cd"));
let ver_hash = output(Command::new("git").current_dir(dir)
.arg("rev-parse").arg("HEAD"));
let short_ver_hash = output(Command::new("git")
.current_dir(dir)
.arg("rev-parse")
.arg("--short=9")
.arg("HEAD"));
let ver_date = output(
Command::new("git")
.current_dir(dir)
.arg("log")
.arg("-1")
.arg("--date=short")
.arg("--pretty=format:%cd"),
);
let ver_hash = output(
Command::new("git")
.current_dir(dir)
.arg("rev-parse")
.arg("HEAD"),
);
let short_ver_hash = output(
Command::new("git")
.current_dir(dir)
.arg("rev-parse")
.arg("--short=9")
.arg("HEAD"),
);
GitInfo {
inner: Some(Info {
commit_date: ver_date.trim().to_string(),
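GitInfo::new above shells out to git three times and keeps the trimmed stdout of each call. A compact sketch of that scraping, using a small hypothetical git_output helper (error handling is reduced to Option; the real code bails out earlier if `git rev-parse` fails):

use std::path::Path;
use std::process::Command;

// Run `git <args>` in `dir` and return trimmed stdout on success.
fn git_output(dir: &Path, args: &[&str]) -> Option<String> {
    let out = Command::new("git").current_dir(dir).args(args).output().ok()?;
    if !out.status.success() {
        return None;
    }
    Some(String::from_utf8_lossy(&out.stdout).trim().to_string())
}

fn main() {
    let dir = Path::new(".");
    let commit_date = git_output(dir, &["log", "-1", "--date=short", "--pretty=format:%cd"]);
    let sha = git_output(dir, &["rev-parse", "HEAD"]);
    let short_sha = git_output(dir, &["rev-parse", "--short=9", "HEAD"]);
    println!("{:?} {:?} {:?}", commit_date, sha, short_sha);
}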
--- file diff (path not shown) ---
@@ -1,11 +1,12 @@
//! Implementation of compiling the compiler and standard library, in "check" mode.

use crate::compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, rustc_cargo_env,
add_to_sysroot};
use crate::builder::{RunConfig, Builder, ShouldRun, Step};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::compile::{
add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, test_cargo,
};
use crate::tool::{prepare_tool_cargo, SourceType};
use crate::{Compiler, Mode};
use crate::cache::{INTERNER, Interned};
use std::path::PathBuf;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -22,9 +23,7 @@ impl Step for Std {
}

fn make_run(run: RunConfig) {
run.builder.ensure(Std {
target: run.target,
});
run.builder.ensure(Std { target: run.target });
}

fn run(self, builder: &Builder) {
@@ -35,12 +34,17 @@ impl Step for Std {
std_cargo(builder, &compiler, target, &mut cargo);

let _folder = builder.fold_output(|| format!("stage{}-std", compiler.stage));
builder.info(&format!("Checking std artifacts ({} -> {})", &compiler.host, target));
run_cargo(builder,
&mut cargo,
vec![],
&libstd_stamp(builder, compiler, target),
true);
builder.info(&format!(
"Checking std artifacts ({} -> {})",
&compiler.host, target
));
run_cargo(
builder,
&mut cargo,
vec![],
&libstd_stamp(builder, compiler, target),
true,
);

let libdir = builder.sysroot_libdir(compiler, target);
add_to_sysroot(&builder, &libdir, &libstd_stamp(builder, compiler, target));
@@ -62,9 +66,7 @@ impl Step for Rustc {
}

fn make_run(run: RunConfig) {
run.builder.ensure(Rustc {
target: run.target,
});
run.builder.ensure(Rustc { target: run.target });
}

/// Build the compiler.
@@ -82,15 +84,24 @@ impl Step for Rustc {
rustc_cargo(builder, &mut cargo);

let _folder = builder.fold_output(|| format!("stage{}-rustc", compiler.stage));
builder.info(&format!("Checking compiler artifacts ({} -> {})", &compiler.host, target));
run_cargo(builder,
&mut cargo,
vec![],
&librustc_stamp(builder, compiler, target),
true);
builder.info(&format!(
"Checking compiler artifacts ({} -> {})",
&compiler.host, target
));
run_cargo(
builder,
&mut cargo,
vec![],
&librustc_stamp(builder, compiler, target),
true,
);

let libdir = builder.sysroot_libdir(compiler, target);
add_to_sysroot(&builder, &libdir, &librustc_stamp(builder, compiler, target));
add_to_sysroot(
&builder,
&libdir,
&librustc_stamp(builder, compiler, target),
);
}
}

@@ -111,9 +122,9 @@ impl Step for CodegenBackend {

fn make_run(run: RunConfig) {
let backend = run.builder.config.rust_codegen_backends.get(0);
let backend = backend.cloned().unwrap_or_else(|| {
INTERNER.intern_str("llvm")
});
let backend = backend
.cloned()
.unwrap_or_else(|| INTERNER.intern_str("llvm"));
run.builder.ensure(CodegenBackend {
target: run.target,
backend,
@@ -128,17 +139,21 @@ impl Step for CodegenBackend {
builder.ensure(Rustc { target });

let mut cargo = builder.cargo(compiler, Mode::Codegen, target, "check");
cargo.arg("--manifest-path").arg(builder.src.join("src/librustc_codegen_llvm/Cargo.toml"));
cargo
.arg("--manifest-path")
.arg(builder.src.join("src/librustc_codegen_llvm/Cargo.toml"));
rustc_cargo_env(builder, &mut cargo);

// We won't build LLVM if it's not available, as it shouldn't affect `check`.

let _folder = builder.fold_output(|| format!("stage{}-rustc_codegen_llvm", compiler.stage));
run_cargo(builder,
&mut cargo,
vec![],
&codegen_backend_stamp(builder, compiler, target, backend),
true);
run_cargo(
builder,
&mut cargo,
vec![],
&codegen_backend_stamp(builder, compiler, target, backend),
true,
);
}
}

@@ -156,9 +171,7 @@ impl Step for Test {
}

fn make_run(run: RunConfig) {
run.builder.ensure(Test {
target: run.target,
});
run.builder.ensure(Test { target: run.target });
}

fn run(self, builder: &Builder) {
@@ -171,12 +184,17 @@ impl Step for Test {
test_cargo(builder, &compiler, target, &mut cargo);

let _folder = builder.fold_output(|| format!("stage{}-test", compiler.stage));
builder.info(&format!("Checking test artifacts ({} -> {})", &compiler.host, target));
run_cargo(builder,
&mut cargo,
vec![],
&libtest_stamp(builder, compiler, target),
true);
builder.info(&format!(
"Checking test artifacts ({} -> {})",
&compiler.host, target
));
run_cargo(
builder,
&mut cargo,
vec![],
&libtest_stamp(builder, compiler, target),
true,
);

let libdir = builder.sysroot_libdir(compiler, target);
add_to_sysroot(builder, &libdir, &libtest_stamp(builder, compiler, target));
@@ -198,9 +216,7 @@ impl Step for Rustdoc {
}

fn make_run(run: RunConfig) {
run.builder.ensure(Rustdoc {
target: run.target,
});
run.builder.ensure(Rustdoc { target: run.target });
}

fn run(self, builder: &Builder) {
@@ -209,22 +225,29 @@ impl Step for Rustdoc {

builder.ensure(Rustc { target });

let mut cargo = prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
target,
"check",
"src/tools/rustdoc",
SourceType::InTree,
&[]);
let mut cargo = prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
target,
"check",
"src/tools/rustdoc",
SourceType::InTree,
&[],
);

let _folder = builder.fold_output(|| format!("stage{}-rustdoc", compiler.stage));
println!("Checking rustdoc artifacts ({} -> {})", &compiler.host, target);
run_cargo(builder,
&mut cargo,
vec![],
&rustdoc_stamp(builder, compiler, target),
true);
println!(
"Checking rustdoc artifacts ({} -> {})",
&compiler.host, target
);
run_cargo(
builder,
&mut cargo,
vec![],
&rustdoc_stamp(builder, compiler, target),
true,
);

let libdir = builder.sysroot_libdir(compiler, target);
add_to_sysroot(&builder, &libdir, &rustdoc_stamp(builder, compiler, target));
@@ -235,34 +258,44 @@ impl Step for Rustdoc {
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
pub fn libstd_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp")
builder
.cargo_out(compiler, Mode::Std, target)
.join(".libstd-check.stamp")
}

/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn libtest_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
builder.cargo_out(compiler, Mode::Test, target).join(".libtest-check.stamp")
builder
.cargo_out(compiler, Mode::Test, target)
.join(".libtest-check.stamp")
}

/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn librustc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc-check.stamp")
builder
.cargo_out(compiler, Mode::Rustc, target)
.join(".librustc-check.stamp")
}

/// Cargo's output path for librustc_codegen_llvm in a given stage, compiled by a particular
/// compiler for the specified target and backend.
fn codegen_backend_stamp(builder: &Builder,
compiler: Compiler,
target: Interned<String>,
backend: Interned<String>) -> PathBuf {
builder.cargo_out(compiler, Mode::Codegen, target)
.join(format!(".librustc_codegen_llvm-{}-check.stamp", backend))
fn codegen_backend_stamp(
builder: &Builder,
compiler: Compiler,
target: Interned<String>,
backend: Interned<String>,
) -> PathBuf {
builder
.cargo_out(compiler, Mode::Codegen, target)
.join(format!(".librustc_codegen_llvm-{}-check.stamp", backend))
}

/// Cargo's output path for rustdoc in a given stage, compiled by a particular
/// compiler for the specified target.
pub fn rustdoc_stamp(builder: &Builder, compiler: Compiler, target: Interned<String>) -> PathBuf {
builder.cargo_out(compiler, Mode::ToolRustc, target)
builder
.cargo_out(compiler, Mode::ToolRustc, target)
.join(".rustdoc-check.stamp")
}
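Every type in this file implements the bootstrap Step trait: should_run maps command-line paths to the step, make_run constructs the step and hands it to builder.ensure, and run does the work. A much-simplified sketch of that shape follows; these structs and the direct run call are stand-ins for the real trait, which goes through the ensure cache rather than calling run directly.

// Toy builder and run configuration, only for illustration.
struct Builder {
    verbose: bool,
}

struct RunConfig<'a> {
    builder: &'a Builder,
    target: String,
}

trait Step: Sized {
    fn make_run(run: RunConfig);
    fn run(self, builder: &Builder);
}

struct Std {
    target: String,
}

impl Step for Std {
    fn make_run(run: RunConfig) {
        // In the real code this is `run.builder.ensure(Std { target: run.target })`,
        // which consults the step cache before ever calling `run`.
        Std { target: run.target }.run(run.builder);
    }

    fn run(self, builder: &Builder) {
        if builder.verbose {
            println!("checking std for {}", self.target);
        }
    }
}

fn main() {
    let builder = Builder { verbose: true };
    Std::make_run(RunConfig {
        builder: &builder,
        target: "x86_64-unknown-linux-gnu".into(),
    });
}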
--- file diff (path not shown) ---
@@ -29,7 +29,7 @@ pub fn clean(build: &Build, all: bool) {
for entry in entries {
let entry = t!(entry);
if entry.file_name().to_str() == Some("llvm") {
continue
continue;
}
let path = t!(entry.path().canonicalize());
rm_rf(&path);
@@ -45,7 +45,7 @@ fn rm_rf(path: &Path) {
return;
}
panic!("failed to get metadata for file {}: {}", path.display(), e);
},
}
Ok(metadata) => {
if metadata.file_type().is_file() || metadata.file_type().is_symlink() {
do_op(path, "remove file", |p| fs::remove_file(p));
@@ -56,20 +56,20 @@ fn rm_rf(path: &Path) {
rm_rf(&t!(file).path());
}
do_op(path, "remove dir", |p| fs::remove_dir(p));
},
}
};
}

fn do_op<F>(path: &Path, desc: &str, mut f: F)
where F: FnMut(&Path) -> io::Result<()>
where
F: FnMut(&Path) -> io::Result<()>,
{
match f(path) {
Ok(()) => {}
// On windows we can't remove a readonly file, and git will often clone files as readonly.
// As a result, we have some special logic to remove readonly files on windows.
// This is also the reason that we can't use things like fs::remove_dir_all().
Err(ref e) if cfg!(windows) &&
e.kind() == ErrorKind::PermissionDenied => {
Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => {
let mut p = t!(path.symlink_metadata()).permissions();
p.set_readonly(false);
t!(fs::set_permissions(path, p));
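The comment in do_op above explains the Windows quirk this code works around: readonly files (as git often checks them out) fail to delete with PermissionDenied, so the readonly bit is cleared and the operation retried. A standalone sketch of that trick for a single file:

use std::fs;
use std::io::{self, ErrorKind};
use std::path::Path;

// Try to remove a file; on a Windows PermissionDenied error, clear the
// readonly attribute and retry once.
fn remove_file_force(path: &Path) -> io::Result<()> {
    match fs::remove_file(path) {
        Err(ref e) if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied => {
            let mut perms = path.symlink_metadata()?.permissions();
            perms.set_readonly(false);
            fs::set_permissions(path, perms)?;
            fs::remove_file(path)
        }
        other => other,
    }
}

fn main() -> io::Result<()> {
    let tmp = std::env::temp_dir().join("rustbuild-demo-file");
    fs::write(&tmp, b"demo")?;
    remove_file_force(&tmp)
}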
--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown) ---
@@ -3,18 +3,18 @@
//! This module implements parsing `config.toml` configuration files to tweak
//! how the build runs.

use std::cmp;
use std::collections::{HashMap, HashSet};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::process;
use std::cmp;

use num_cpus;
use toml;
use crate::cache::{INTERNER, Interned};
use crate::cache::{Interned, INTERNER};
use crate::flags::Flags;
pub use crate::flags::Subcommand;
use num_cpus;
use toml;

/// Global configuration for the entire build and/or bootstrap.
///
@@ -404,17 +404,22 @@ impl Config {
// If --target was specified but --host wasn't specified, don't run any host-only tests.
config.run_host_only = !(flags.host.is_empty() && !flags.target.is_empty());

let toml = file.map(|file| {
let contents = t!(fs::read_to_string(&file));
match toml::from_str(&contents) {
Ok(table) => table,
Err(err) => {
println!("failed to parse TOML configuration '{}': {}",
file.display(), err);
process::exit(2);
let toml = file
.map(|file| {
let contents = t!(fs::read_to_string(&file));
match toml::from_str(&contents) {
Ok(table) => table,
Err(err) => {
println!(
"failed to parse TOML configuration '{}': {}",
file.display(),
err
);
process::exit(2);
}
}
}
}).unwrap_or_else(|| TomlConfig::default());
})
.unwrap_or_else(|| TomlConfig::default());

let build = toml.build.clone().unwrap_or_default();
// set by bootstrap.py
@@ -425,7 +430,10 @@ impl Config {
config.hosts.push(host);
}
}
for target in config.hosts.iter().cloned()
for target in config
.hosts
.iter()
.cloned()
.chain(build.target.iter().map(|s| INTERNER.intern_str(s)))
{
if !config.targets.contains(&target) {
@@ -443,7 +451,6 @@ impl Config {
config.targets
};


config.nodejs = build.nodejs.map(PathBuf::from);
config.gdb = build.gdb.map(PathBuf::from);
config.python = build.python.map(PathBuf::from);
@@ -490,9 +497,7 @@ impl Config {

if let Some(ref llvm) = toml.llvm {
match llvm.ccache {
Some(StringOrBool::String(ref s)) => {
config.ccache = Some(s.to_string())
}
Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()),
Some(StringOrBool::Bool(true)) => {
config.ccache = Some("ccache".to_string());
}
@@ -508,7 +513,9 @@ impl Config {
set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
set(&mut config.llvm_link_shared, llvm.link_shared);
config.llvm_targets = llvm.targets.clone();
config.llvm_experimental_targets = llvm.experimental_targets.clone()
config.llvm_experimental_targets = llvm
.experimental_targets
.clone()
.unwrap_or_else(|| "WebAssembly;RISCV".to_string());
config.llvm_link_jobs = llvm.link_jobs;
config.llvm_version_suffix = llvm.version_suffix.clone();
@@ -547,18 +554,23 @@ impl Config {
config.rustc_default_linker = rust.default_linker.clone();
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from);
set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings));
set(
&mut config.deny_warnings,
rust.deny_warnings.or(flags.warnings),
);
set(&mut config.backtrace_on_ice, rust.backtrace_on_ice);
set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir);
set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo);

if let Some(ref backends) = rust.codegen_backends {
config.rust_codegen_backends = backends.iter()
.map(|s| INTERNER.intern_str(s))
.collect();
config.rust_codegen_backends =
backends.iter().map(|s| INTERNER.intern_str(s)).collect();
}

set(&mut config.rust_codegen_backends_dir, rust.codegen_backends_dir.clone());
set(
&mut config.rust_codegen_backends_dir,
rust.codegen_backends_dir.clone(),
);

match rust.codegen_units {
Some(0) => config.rust_codegen_units = Some(num_cpus::get() as u32),
@@ -591,7 +603,9 @@ impl Config {
target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);

config.target_config.insert(INTERNER.intern_string(triple.clone()), target);
config
.target_config
.insert(INTERNER.intern_string(triple.clone()), target);
}
}
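The block above reads config.toml, deserializes it with the toml and serde crates, and exits with code 2 on a parse error. A minimal sketch of that flow, with a tiny stand-in TomlConfig (the real struct has many more sections):

use serde::Deserialize;

#[derive(Deserialize, Default, Debug)]
struct TomlConfig {
    build: Option<Build>,
}

#[derive(Deserialize, Debug)]
struct Build {
    host: Option<Vec<String>>,
    target: Option<Vec<String>>,
}

// Parse the file contents if present, fall back to defaults otherwise,
// and exit on malformed TOML, mirroring the shape of the code above.
fn load(contents: Option<&str>) -> TomlConfig {
    contents
        .map(|contents| match toml::from_str(contents) {
            Ok(table) => table,
            Err(err) => {
                println!("failed to parse TOML configuration: {}", err);
                std::process::exit(2);
            }
        })
        .unwrap_or_else(TomlConfig::default)
}

fn main() {
    let cfg = load(Some("[build]\nhost = [\"x86_64-unknown-linux-gnu\"]"));
    println!("{:?}", cfg);
}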

--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown) ---
@@ -83,7 +83,8 @@ impl Default for Subcommand {
impl Flags {
pub fn parse(args: &[String]) -> Flags {
let mut extra_help = String::new();
let mut subcommand_help = String::from("\
let mut subcommand_help = String::from(
"\
Usage: x.py <subcommand> [options] [<paths>...]
Subcommands:
@@ -96,7 +97,7 @@ Subcommands:
dist Build distribution artifacts
install Install distribution artifacts
To learn more about a subcommand, run `./x.py <subcommand> -h`"
To learn more about a subcommand, run `./x.py <subcommand> -h`",
);

let mut opts = Options::new();
@@ -110,12 +111,20 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`"
opts.optmulti("", "exclude", "build paths to exclude", "PATH");
opts.optopt("", "on-fail", "command to run on failure", "CMD");
opts.optflag("", "dry-run", "dry run; don't build anything");
opts.optopt("", "stage",
opts.optopt(
"",
"stage",
"stage to build (indicates compiler to use/test, e.g., stage 0 uses the \
bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)",
"N");
opts.optmulti("", "keep-stage", "stage(s) to keep without recompiling \
(pass multiple times to keep e.g., both stages 0 and 1)", "N");
"N",
);
opts.optmulti(
"",
"keep-stage",
"stage(s) to keep without recompiling \
(pass multiple times to keep e.g., both stages 0 and 1)",
"N",
);
opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
opts.optflag("h", "help", "print this help message");
@@ -344,7 +353,8 @@ Arguments:
format!(
"Run `./x.py {} -h -v` to see a list of available paths.",
subcommand
).as_str(),
)
.as_str(),
);
}

@@ -399,8 +409,10 @@ Arguments:
dry_run: matches.opt_present("dry-run"),
on_fail: matches.opt_str("on-fail"),
rustc_error_format: matches.opt_str("error-format"),
keep_stage: matches.opt_strs("keep-stage")
.into_iter().map(|j| j.parse().unwrap())
keep_stage: matches
.opt_strs("keep-stage")
.into_iter()
.map(|j| j.parse().unwrap())
.collect(),
host: split(&matches.opt_strs("host"))
.into_iter()
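Flags::parse above is built on the getopts crate: declare the options, parse the arguments, then pull values out of the Matches. A cut-down sketch using only options that appear in this diff:

use getopts::Options;

fn main() {
    let args: Vec<String> = std::env::args().skip(1).collect();

    let mut opts = Options::new();
    opts.optflag("h", "help", "print this help message");
    opts.optopt("", "stage", "stage to build", "N");
    opts.optmulti("", "keep-stage", "stage(s) to keep without recompiling", "N");
    opts.optflag("", "dry-run", "dry run; don't build anything");

    let matches = match opts.parse(&args) {
        Ok(m) => m,
        Err(e) => {
            eprintln!("{}", e);
            std::process::exit(1);
        }
    };

    if matches.opt_present("help") {
        print!("{}", opts.usage("Usage: x.py <subcommand> [options]"));
        return;
    }

    // Numeric options arrive as strings and are parsed by the caller,
    // just like keep_stage in the diff above.
    let stage: Option<u32> = matches.opt_str("stage").map(|s| s.parse().unwrap());
    let keep_stage: Vec<u32> = matches
        .opt_strs("keep-stage")
        .into_iter()
        .map(|s| s.parse().unwrap())
        .collect();
    println!(
        "stage={:?} keep_stage={:?} dry_run={}",
        stage,
        keep_stage,
        matches.opt_present("dry-run")
    );
}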
--- file diff (path not shown) ---
@@ -5,7 +5,7 @@

use std::env;
use std::fs;
use std::path::{Path, PathBuf, Component};
use std::path::{Component, Path, PathBuf};
use std::process::Command;

use crate::dist::{self, pkgname, sanitize_sh, tmpdir};
@@ -53,7 +53,7 @@ fn install_sh(
package: &str,
name: &str,
stage: u32,
host: Option<Interned<String>>
host: Option<Interned<String>>,
) {
builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));

@@ -67,7 +67,11 @@ fn install_sh(
let prefix = builder.config.prefix.as_ref().map_or(prefix_default, |p| {
fs::canonicalize(p).unwrap_or_else(|_| panic!("could not canonicalize {}", p.display()))
});
let sysconfdir = builder.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let sysconfdir = builder
.config
.sysconfdir
.as_ref()
.unwrap_or(&sysconfdir_default);
let datadir = builder.config.datadir.as_ref().unwrap_or(&datadir_default);
let docdir = builder.config.docdir.as_ref().unwrap_or(&docdir_default);
let bindir = builder.config.bindir.as_ref().unwrap_or(&bindir_default);
@@ -102,7 +106,9 @@ fn install_sh(

let mut cmd = Command::new("sh");
cmd.current_dir(&empty_dir)
.arg(sanitize_sh(&tmpdir(builder).join(&package_name).join("install.sh")))
.arg(sanitize_sh(
&tmpdir(builder).join(&package_name).join("install.sh"),
))
.arg(format!("--prefix={}", sanitize_sh(&prefix)))
.arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir)))
.arg(format!("--datadir={}", sanitize_sh(&datadir)))
@@ -253,8 +259,7 @@ impl Step for Src {

fn should_run(run: ShouldRun) -> ShouldRun {
let config = &run.builder.config;
let cond = config.extended &&
config.tools.as_ref().map_or(true, |t| t.contains("src"));
let cond = config.extended && config.tools.as_ref().map_or(true, |t| t.contains("src"));
run.path("src").default_condition(cond)
}

--- file diff (path not shown) ---
@@ -29,10 +29,10 @@

#![allow(nonstandard_style, dead_code)]

use crate::Build;
use std::env;
use std::io;
use std::mem;
use crate::Build;

type HANDLE = *mut u8;
type BOOL = i32;
@@ -60,21 +60,23 @@ extern "system" {
fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
fn CloseHandle(hObject: HANDLE) -> BOOL;
fn GetCurrentProcess() -> HANDLE;
fn OpenProcess(dwDesiredAccess: DWORD,
bInheritHandle: BOOL,
dwProcessId: DWORD) -> HANDLE;
fn DuplicateHandle(hSourceProcessHandle: HANDLE,
hSourceHandle: HANDLE,
hTargetProcessHandle: HANDLE,
lpTargetHandle: LPHANDLE,
dwDesiredAccess: DWORD,
bInheritHandle: BOOL,
dwOptions: DWORD) -> BOOL;
fn OpenProcess(dwDesiredAccess: DWORD, bInheritHandle: BOOL, dwProcessId: DWORD) -> HANDLE;
fn DuplicateHandle(
hSourceProcessHandle: HANDLE,
hSourceHandle: HANDLE,
hTargetProcessHandle: HANDLE,
lpTargetHandle: LPHANDLE,
dwDesiredAccess: DWORD,
bInheritHandle: BOOL,
dwOptions: DWORD,
) -> BOOL;
fn AssignProcessToJobObject(hJob: HANDLE, hProcess: HANDLE) -> BOOL;
fn SetInformationJobObject(hJob: HANDLE,
JobObjectInformationClass: JOBOBJECTINFOCLASS,
lpJobObjectInformation: LPVOID,
cbJobObjectInformationLength: DWORD) -> BOOL;
fn SetInformationJobObject(
hJob: HANDLE,
JobObjectInformationClass: JOBOBJECTINFOCLASS,
lpJobObjectInformation: LPVOID,
cbJobObjectInformationLength: DWORD,
) -> BOOL;
fn SetErrorMode(mode: UINT) -> UINT;
}

@@ -131,10 +133,12 @@ pub unsafe fn setup(build: &mut Build) {
info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS;
info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS;
}
let r = SetInformationJobObject(job,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD);
let r = SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD,
);
assert!(r != 0, "{}", io::Error::last_os_error());

// Assign our process to this job object. Note that if this fails, one very
@@ -149,7 +153,7 @@ pub unsafe fn setup(build: &mut Build) {
let r = AssignProcessToJobObject(job, GetCurrentProcess());
if r == 0 {
CloseHandle(job);
return
return;
}

// If we've got a parent process (e.g., the python script that called us)
@@ -168,9 +172,15 @@ pub unsafe fn setup(build: &mut Build) {
let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
let mut parent_handle = 0 as *mut _;
let r = DuplicateHandle(GetCurrentProcess(), job,
parent, &mut parent_handle,
0, FALSE, DUPLICATE_SAME_ACCESS);
let r = DuplicateHandle(
GetCurrentProcess(),
job,
parent,
&mut parent_handle,
0,
FALSE,
DUPLICATE_SAME_ACCESS,
);

// If this failed, well at least we tried! An example of DuplicateHandle
// failing in the past has been when the wrong python2 package spawned this
--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown) ---
@@ -1,13 +1,13 @@
use std::collections::HashMap;
use std::process::Command;
use std::path::PathBuf;
use std::collections::HashSet;
use std::path::PathBuf;
use std::process::Command;

use build_helper::output;
use serde_json;

use crate::{Build, Crate};
use crate::cache::INTERNER;
use crate::{Build, Crate};

#[derive(Deserialize)]
struct Output {
@@ -71,28 +71,35 @@ fn build_krate(features: &str, build: &mut Build, resolves: &mut Vec<ResolveNode
// to know what crates to test. Here we run `cargo metadata` to learn about
// the dependency graph and what `-p` arguments there are.
let mut cargo = Command::new(&build.initial_cargo);
cargo.arg("metadata")
.arg("--format-version").arg("1")
.arg("--features").arg(features)
.arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"));
cargo
.arg("metadata")
.arg("--format-version")
.arg("1")
.arg("--features")
.arg(features)
.arg("--manifest-path")
.arg(build.src.join(krate).join("Cargo.toml"));
let output = output(&mut cargo);
let output: Output = serde_json::from_str(&output).unwrap();
for package in output.packages {
if package.source.is_none() {
let name = INTERNER.intern_string(package.name);
let mut path = PathBuf::from(package.manifest_path);
path.pop();
build.crates.insert(name, Crate {
build_step: format!("build-crate-{}", name),
doc_step: format!("doc-crate-{}", name),
test_step: format!("test-crate-{}", name),
bench_step: format!("bench-crate-{}", name),
build.crates.insert(
name,
version: package.version,
id: package.id,
deps: HashSet::new(),
path,
});
Crate {
build_step: format!("build-crate-{}", name),
doc_step: format!("doc-crate-{}", name),
test_step: format!("test-crate-{}", name),
bench_step: format!("bench-crate-{}", name),
name,
version: package.version,
id: package.id,
deps: HashSet::new(),
path,
},
);
}
}
resolves.extend(output.resolve.nodes);
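build_krate above runs `cargo metadata --format-version 1` and deserializes the JSON with serde_json, treating packages whose source is null as local workspace crates. A hedged sketch of that call, modeling only the fields the bootstrap code reads (serde and serde_json are assumed as dependencies):

use std::process::Command;

use serde::Deserialize;

#[derive(Deserialize)]
struct Output {
    packages: Vec<Package>,
}

#[derive(Deserialize)]
struct Package {
    name: String,
    manifest_path: String,
    source: Option<String>,
}

fn main() {
    let out = Command::new("cargo")
        .arg("metadata")
        .arg("--format-version")
        .arg("1")
        .output()
        .expect("failed to run cargo");
    let stdout = String::from_utf8(out.stdout).expect("non-utf8 cargo output");
    let meta: Output = serde_json::from_str(&stdout).expect("unexpected metadata shape");
    for package in meta.packages {
        // `source == None` marks local (workspace) crates, as in the diff above.
        if package.source.is_none() {
            println!("local crate {} at {}", package.name, package.manifest_path);
        }
    }
}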
--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown) ---
@@ -10,7 +10,7 @@

use std::collections::HashMap;
use std::env;
use std::ffi::{OsString, OsStr};
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::PathBuf;
use std::process::Command;
@@ -28,26 +28,31 @@ impl Finder {
fn new() -> Self {
Self {
cache: HashMap::new(),
path: env::var_os("PATH").unwrap_or_default()
path: env::var_os("PATH").unwrap_or_default(),
}
}

fn maybe_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> Option<PathBuf> {
let cmd: OsString = cmd.as_ref().into();
let path = self.path.clone();
self.cache.entry(cmd.clone()).or_insert_with(|| {
for path in env::split_paths(&path) {
let target = path.join(&cmd);
let mut cmd_alt = cmd.clone();
cmd_alt.push(".exe");
if target.is_file() || // some/path/git
self.cache
.entry(cmd.clone())
.or_insert_with(|| {
for path in env::split_paths(&path) {
let target = path.join(&cmd);
let mut cmd_alt = cmd.clone();
cmd_alt.push(".exe");
if target.is_file() || // some/path/git
target.with_extension("exe").exists() || // some/path/git.exe
target.join(&cmd_alt).exists() { // some/path/git/git.exe
return Some(target);
target.join(&cmd_alt).exists()
{
// some/path/git/git.exe
return Some(target);
}
}
}
None
}).clone()
None
})
.clone()
}

fn must_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> PathBuf {
@@ -75,7 +80,9 @@ pub fn check(build: &mut Build) {
}

// We need cmake, but only if we're actually building LLVM or sanitizers.
let building_llvm = build.hosts.iter()
let building_llvm = build
.hosts
.iter()
.filter_map(|host| build.config.target_config.get(host))
.any(|config| config.llvm_config.is_none());
if building_llvm || build.config.sanitizers {
@@ -106,17 +113,29 @@ pub fn check(build: &mut Build) {
}
}

build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p))
build.config.python = build
.config
.python
.take()
.map(|p| cmd_finder.must_have(p))
.or_else(|| env::var_os("BOOTSTRAP_PYTHON").map(PathBuf::from)) // set by bootstrap.py
.or_else(|| cmd_finder.maybe_have("python2.7"))
.or_else(|| cmd_finder.maybe_have("python2"))
.or_else(|| Some(cmd_finder.must_have("python")));

build.config.nodejs = build.config.nodejs.take().map(|p| cmd_finder.must_have(p))
build.config.nodejs = build
.config
.nodejs
.take()
.map(|p| cmd_finder.must_have(p))
.or_else(|| cmd_finder.maybe_have("node"))
.or_else(|| cmd_finder.maybe_have("nodejs"));

build.config.gdb = build.config.gdb.take().map(|p| cmd_finder.must_have(p))
build.config.gdb = build
.config
.gdb
.take()
.map(|p| cmd_finder.must_have(p))
.or_else(|| cmd_finder.maybe_have("gdb"));

// We're gonna build some custom C code here and there, host triples
@@ -151,14 +170,16 @@ pub fn check(build: &mut Build) {

for target in &build.targets {
// Can't compile for iOS unless we're on macOS
if target.contains("apple-ios") &&
!build.build.contains("apple-darwin") {
if target.contains("apple-ios") && !build.build.contains("apple-darwin") {
panic!("the iOS target is only supported on macOS");
}

if target.contains("-none-") {
if build.no_std(*target).is_none() {
let target = build.config.target_config.entry(target.clone())
let target = build
.config
.target_config
.entry(target.clone())
.or_default();

target.no_std = true;
@@ -174,26 +195,33 @@ pub fn check(build: &mut Build) {
// If this is a native target (host is also musl) and no musl-root is given,
// fall back to the system toolchain in /usr before giving up
if build.musl_root(*target).is_none() && build.config.build == *target {
let target = build.config.target_config.entry(target.clone())
let target = build
.config
.target_config
.entry(target.clone())
.or_default();
target.musl_root = Some("/usr".into());
}
match build.musl_root(*target) {
Some(root) => {
if fs::metadata(root.join("lib/libc.a")).is_err() {
panic!("couldn't find libc.a in musl dir: {}",
root.join("lib").display());
panic!(
"couldn't find libc.a in musl dir: {}",
root.join("lib").display()
);
}
if fs::metadata(root.join("lib/libunwind.a")).is_err() {
panic!("couldn't find libunwind.a in musl dir: {}",
root.join("lib").display());
panic!(
"couldn't find libunwind.a in musl dir: {}",
root.join("lib").display()
);
}
}
None => {
panic!("when targeting MUSL either the rust.musl-root \
option or the target.$TARGET.musl-root option must \
be specified in config.toml")
}
None => panic!(
"when targeting MUSL either the rust.musl-root \
option or the target.$TARGET.musl-root option must \
be specified in config.toml"
),
}
}

@@ -203,7 +231,8 @@ pub fn check(build: &mut Build) {
// Studio, so detect that here and error.
let out = output(Command::new("cmake").arg("--help"));
if !out.contains("Visual Studio") {
panic!("
panic!(
"
cmake does not support Visual Studio generators.
This is likely due to it being an msys/cygwin build of cmake,
@@ -214,7 +243,8 @@ If you are building under msys2 try installing the mingw-w64-x86_64-cmake
package instead of cmake:
$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
");
"
);
}
}
}
@@ -226,8 +256,10 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
if build.config.channel == "stable" {
let stage0 = t!(fs::read_to_string(build.src.join("src/stage0.txt")));
if stage0.contains("\ndev:") {
panic!("bootstrapping from a dev compiler in a stable release, but \
should only be bootstrapping from a released compiler!");
panic!(
"bootstrapping from a dev compiler in a stable release, but \
should only be bootstrapping from a released compiler!"
);
}
}
}
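The Finder reformatted above caches PATH lookups per command name and also tries a .exe suffix so the same probe works on Windows. A simplified, self-contained version of that idea (it drops the extra some/path/git/git.exe case handled in the real code):

use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};
use std::path::PathBuf;

struct Finder {
    // Memoize lookups so repeated probes for the same tool are free.
    cache: HashMap<OsString, Option<PathBuf>>,
    path: OsString,
}

impl Finder {
    fn new() -> Self {
        Finder {
            cache: HashMap::new(),
            path: env::var_os("PATH").unwrap_or_default(),
        }
    }

    fn maybe_have<S: AsRef<OsStr>>(&mut self, cmd: S) -> Option<PathBuf> {
        let cmd: OsString = cmd.as_ref().into();
        let path = self.path.clone();
        self.cache
            .entry(cmd.clone())
            .or_insert_with(|| {
                for dir in env::split_paths(&path) {
                    let target = dir.join(&cmd);
                    // some/path/git or some/path/git.exe
                    if target.is_file() || target.with_extension("exe").exists() {
                        return Some(target);
                    }
                }
                None
            })
            .clone()
    }
}

fn main() {
    let mut finder = Finder::new();
    println!("git: {:?}", finder.maybe_have("git"));
    println!("python: {:?}", finder.maybe_have("python"));
}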
--- file diff (path not shown) ---
@@ -19,11 +19,11 @@ use crate::compile;
use crate::dist;
use crate::flags::Subcommand;
use crate::native;
use crate::tool::{self, Tool, SourceType};
use crate::tool::{self, SourceType, Tool};
use crate::toolstate::ToolState;
use crate::util::{self, dylib_path, dylib_path_var};
use crate::Crate as CargoCrate;
use crate::{DocTests, Mode, GitRepo};
use crate::{DocTests, GitRepo, Mode};

const ADB_TEST_DIR: &str = "/data/tmp/work";

@@ -211,14 +211,16 @@ impl Step for Cargo {
compiler,
target: self.host,
});
let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
self.host,
"test",
"src/tools/cargo",
SourceType::Submodule,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
self.host,
"test",
"src/tools/cargo",
SourceType::Submodule,
&[],
);

if !builder.fail_fast {
cargo.arg("--no-fail-fast");
@@ -274,18 +276,19 @@ impl Step for Rls {
return;
}

let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/rls",
SourceType::Submodule,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/rls",
SourceType::Submodule,
&[],
);

builder.add_rustc_lib_path(compiler, &mut cargo);
cargo.arg("--")
.args(builder.config.cmd.test_args());
cargo.arg("--").args(builder.config.cmd.test_args());

if try_run(builder, &mut cargo) {
builder.save_toolstate("rls", ToolState::TestPass);
@@ -330,14 +333,16 @@ impl Step for Rustfmt {
return;
}

let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/rustfmt",
SourceType::Submodule,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/rustfmt",
SourceType::Submodule,
&[],
);

let dir = testdir(builder, compiler.host);
t!(fs::create_dir_all(&dir));
@@ -386,14 +391,16 @@ impl Step for Miri {
extra_features: Vec::new(),
});
if let Some(miri) = miri {
let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/miri",
SourceType::Submodule,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/miri",
SourceType::Submodule,
&[],
);

// miri tests need to know about the stage sysroot
cargo.env("MIRI_SYSROOT", builder.sysroot(compiler));
@@ -438,14 +445,16 @@ impl Step for CompiletestTest {
let host = self.host;
let compiler = builder.compiler(stage, host);

let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolBootstrap,
host,
"test",
"src/tools/compiletest",
SourceType::InTree,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolBootstrap,
host,
"test",
"src/tools/compiletest",
SourceType::InTree,
&[],
);

try_run(builder, &mut cargo);
}
@@ -485,14 +494,16 @@ impl Step for Clippy {
extra_features: Vec::new(),
});
if let Some(clippy) = clippy {
let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/clippy",
SourceType::Submodule,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
host,
"test",
"src/tools/clippy",
SourceType::Submodule,
&[],
);

// clippy tests need to know about the stage sysroot
cargo.env("SYSROOT", builder.sysroot(compiler));
@@ -605,9 +616,7 @@ impl Step for RustdocJS {
});
builder.run(&mut command);
} else {
builder.info(
"No nodejs found, skipping \"src/test/rustdoc-js\" tests"
);
builder.info("No nodejs found, skipping \"src/test/rustdoc-js\" tests");
}
}
}
@@ -693,38 +702,68 @@ fn testdir(builder: &Builder, host: Interned<String>) -> PathBuf {

macro_rules! default_test {
($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => {
test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false });
}
test!($name {
path: $path,
mode: $mode,
suite: $suite,
default: true,
host: false
});
};
}

macro_rules! default_test_with_compare_mode {
($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr,
compare_mode: $compare_mode:expr }) => {
test_with_compare_mode!($name { path: $path, mode: $mode, suite: $suite, default: true,
host: false, compare_mode: $compare_mode });
}
test_with_compare_mode!($name {
path: $path,
mode: $mode,
suite: $suite,
default: true,
host: false,
compare_mode: $compare_mode
});
};
}

macro_rules! host_test {
($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => {
test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true });
}
test!($name {
path: $path,
mode: $mode,
suite: $suite,
default: true,
host: true
});
};
}

macro_rules! test {
($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr,
host: $host:expr }) => {
test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default,
host: $host, compare_mode: None });
}
test_definitions!($name {
path: $path,
mode: $mode,
suite: $suite,
default: $default,
host: $host,
compare_mode: None
});
};
}

macro_rules! test_with_compare_mode {
($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr,
host: $host:expr, compare_mode: $compare_mode:expr }) => {
test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default,
host: $host, compare_mode: Some($compare_mode) });
}
test_definitions!($name {
path: $path,
mode: $mode,
suite: $suite,
default: $default,
host: $host,
compare_mode: Some($compare_mode)
});
};
}

macro_rules! test_definitions {
@@ -771,7 +810,7 @@ macro_rules! test_definitions {
})
}
}
}
};
}

default_test_with_compare_mode!(Ui {
@@ -988,13 +1027,21 @@ impl Step for Compiletest {
if builder.no_std(target) == Some(true) {
// for no_std run-make (e.g., thumb*),
// we need a host compiler which is called by cargo.
builder.ensure(compile::Std { compiler, target: compiler.host });
builder.ensure(compile::Std {
compiler,
target: compiler.host,
});
}

// HACK(eddyb) ensure that `libproc_macro` is available on the host.
builder.ensure(compile::Test { compiler, target: compiler.host });
builder.ensure(compile::Test {
compiler,
target: compiler.host,
});
// Also provide `rust_test_helpers` for the host.
builder.ensure(native::TestHelpers { target: compiler.host });
builder.ensure(native::TestHelpers {
target: compiler.host,
});

builder.ensure(native::TestHelpers { target });
builder.ensure(RemoteCopyLibs { compiler, target });
@@ -1101,23 +1148,22 @@ impl Step for Compiletest {
let run = |cmd: &mut Command| {
cmd.output().map(|output| {
String::from_utf8_lossy(&output.stdout)
.lines().next().unwrap_or_else(|| {
panic!("{:?} failed {:?}", cmd, output)
}).to_string()
.lines()
.next()
.unwrap_or_else(|| panic!("{:?} failed {:?}", cmd, output))
.to_string()
})
};
let lldb_exe = if builder.config.lldb_enabled && !target.contains("emscripten") {
// Test against the lldb that was just built.
builder.llvm_out(target)
.join("bin")
.join("lldb")
builder.llvm_out(target).join("bin").join("lldb")
} else {
PathBuf::from("lldb")
};
let lldb_version = Command::new(&lldb_exe)
.arg("--version")
.output()
.map(|output| { String::from_utf8_lossy(&output.stdout).to_string() })
.map(|output| String::from_utf8_lossy(&output.stdout).to_string())
.ok();
if let Some(ref vers) = lldb_version {
cmd.arg("--lldb-version").arg(vers);
@@ -1136,11 +1182,9 @@ impl Step for Compiletest {
// Get test-args by striping suite path
let mut test_args: Vec<&str> = paths
.iter()
.map(|p| {
match p.strip_prefix(".") {
Ok(path) => path,
Err(_) => p,
}
.map(|p| match p.strip_prefix(".") {
Ok(path) => path,
Err(_) => p,
})
.filter(|p| p.starts_with(suite_path) && p.is_file())
.map(|p| p.strip_prefix(suite_path).unwrap().to_str().unwrap())
@@ -1192,9 +1236,7 @@ impl Step for Compiletest {
}
}
if suite == "run-make-fulldeps" && !builder.config.llvm_enabled {
builder.info(
"Ignoring run-make test suite as they generally don't work without LLVM"
);
builder.info("Ignoring run-make test suite as they generally don't work without LLVM");
return;
}

@@ -1710,7 +1752,7 @@ impl Step for Crate {
if !builder.config.wasm_syscall {
builder.info(
"Libstd was built without `wasm_syscall` feature enabled: \
test output may not be visible."
test output may not be visible.",
);
}

@@ -1786,14 +1828,16 @@ impl Step for CrateRustdoc {
let target = compiler.host;
builder.ensure(compile::Rustc { compiler, target });

let mut cargo = tool::prepare_tool_cargo(builder,
compiler,
Mode::ToolRustc,
target,
test_kind.subcommand(),
"src/tools/rustdoc",
SourceType::InTree,
&[]);
let mut cargo = tool::prepare_tool_cargo(
builder,
compiler,
Mode::ToolRustc,
target,
test_kind.subcommand(),
"src/tools/rustdoc",
SourceType::InTree,
&[],
);
if test_kind.subcommand() == "test" && !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
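The default_test!/host_test!/test! macros above all forward to test_definitions!, filling in defaults along the way. A toy illustration of that forwarding pattern; none of these names exist in bootstrap:

// One inner macro actually generates the item; the outer macro just supplies
// defaults and delegates, like default_test! forwarding to test!.
macro_rules! suite_definitions {
    ($name:ident, $path:expr, $default:expr, $host:expr) => {
        fn $name() {
            println!("suite {} (default: {}, host-only: {})", $path, $default, $host);
        }
    };
}

macro_rules! default_suite {
    ($name:ident, $path:expr) => {
        // Fix `default` to true and `host` to false, as default_test! does.
        suite_definitions!($name, $path, true, false);
    };
}

default_suite!(ui, "src/test/ui");

fn main() {
    ui();
}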
--- file diff (path not shown); large diff not rendered ---
--- file diff (path not shown) ---
@@ -4,15 +4,15 @@
//! not a lot of interesting happenings here unfortunately.

use std::env;
use std::str;
use std::fs;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{SystemTime, Instant};
use std::str;
use std::time::{Instant, SystemTime};

use crate::config::Config;
use crate::builder::Builder;
use crate::config::Config;

/// Returns the `name` as the filename of a static library for `target`.
pub fn staticlib(name: &str, target: &str) -> String {
@@ -41,7 +41,11 @@ pub fn is_dylib(name: &str) -> bool {
/// Returns the corresponding relative library directory that the compiler's
/// dylibs will be found in.
pub fn libdir(target: &str) -> &'static str {
if target.contains("windows") {"bin"} else {"lib"}
if target.contains("windows") {
"bin"
} else {
"lib"
}
}

/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
@@ -75,7 +79,9 @@ pub fn dylib_path() -> Vec<PathBuf> {

/// `push` all components to `buf`. On windows, append `.exe` to the last component.
pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
let (&file, components) = components.split_last().expect("at least one component required");
let (&file, components) = components
.split_last()
.expect("at least one component required");
let mut file = file.to_owned();

if cfg!(windows) {
@@ -99,17 +105,21 @@ impl Drop for TimeIt {
fn drop(&mut self) {
let time = self.1.elapsed();
if !self.0 {
println!("\tfinished in {}.{:03}",
time.as_secs(),
time.subsec_nanos() / 1_000_000);
println!(
"\tfinished in {}.{:03}",
time.as_secs(),
time.subsec_nanos() / 1_000_000
);
}
}
}

/// Symlinks two directories, using junctions on Windows and normal symlinks on
/// Unix.
pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
if config.dry_run { return Ok(()); }
if config.dry_run {
return Ok(());
}
let _ = fs::remove_dir(dest);
return symlink_dir_inner(src, dest);

@@ -129,9 +139,9 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
#[cfg(windows)]
#[allow(nonstandard_style)]
fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> {
use std::ptr;
use std::ffi::OsStr;
use std::os::windows::ffi::OsStrExt;
use std::ptr;

const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
const GENERIC_WRITE: DWORD = 0x40000000;
@@ -167,22 +177,25 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
}

extern "system" {
fn CreateFileW(lpFileName: LPCWSTR,
dwDesiredAccess: DWORD,
dwShareMode: DWORD,
lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
dwCreationDisposition: DWORD,
dwFlagsAndAttributes: DWORD,
hTemplateFile: HANDLE)
-> HANDLE;
fn DeviceIoControl(hDevice: HANDLE,
dwIoControlCode: DWORD,
lpInBuffer: LPVOID,
nInBufferSize: DWORD,
lpOutBuffer: LPVOID,
nOutBufferSize: DWORD,
lpBytesReturned: LPDWORD,
lpOverlapped: LPOVERLAPPED) -> BOOL;
fn CreateFileW(
lpFileName: LPCWSTR,
dwDesiredAccess: DWORD,
dwShareMode: DWORD,
lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
dwCreationDisposition: DWORD,
dwFlagsAndAttributes: DWORD,
hTemplateFile: HANDLE,
) -> HANDLE;
fn DeviceIoControl(
hDevice: HANDLE,
dwIoControlCode: DWORD,
lpInBuffer: LPVOID,
nInBufferSize: DWORD,
lpOutBuffer: LPVOID,
nOutBufferSize: DWORD,
lpBytesReturned: LPDWORD,
lpOverlapped: LPOVERLAPPED,
) -> BOOL;
fn CloseHandle(hObject: HANDLE) -> BOOL;
}

@@ -200,17 +213,18 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
let path = to_u16s(junction)?;

unsafe {
let h = CreateFileW(path.as_ptr(),
GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
0 as *mut _,
OPEN_EXISTING,
FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
ptr::null_mut());
let h = CreateFileW(
path.as_ptr(),
GENERIC_WRITE,
FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
0 as *mut _,
OPEN_EXISTING,
FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
ptr::null_mut(),
);

let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
let db = data.as_mut_ptr()
as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
let db = data.as_mut_ptr() as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
let buf = &mut (*db).ReparseTarget as *mut u16;
let mut i = 0;
// FIXME: this conversion is very hacky
@@ -225,17 +239,19 @@ pub fn symlink_dir(config: &Config, src: &Path, dest: &Path) -> io::Result<()> {
(*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
(*db).ReparseTargetMaximumLength = (i * 2) as WORD;
(*db).ReparseTargetLength = ((i - 1) * 2) as WORD;
(*db).ReparseDataLength =
(*db).ReparseTargetLength as DWORD + 12;
(*db).ReparseDataLength = (*db).ReparseTargetLength as DWORD + 12;

let mut ret = 0;
let res = DeviceIoControl(h as *mut _,
FSCTL_SET_REPARSE_POINT,
data.as_ptr() as *mut _,
(*db).ReparseDataLength + 8,
ptr::null_mut(), 0,
&mut ret,
ptr::null_mut());
let res = DeviceIoControl(
h as *mut _,
FSCTL_SET_REPARSE_POINT,
data.as_ptr() as *mut _,
(*db).ReparseDataLength + 8,
ptr::null_mut(),
0,
&mut ret,
ptr::null_mut(),
);

let out = if res == 0 {
Err(io::Error::last_os_error())
@@ -274,7 +290,10 @@ impl OutputFolder {
// the ANSI escape code to clear from the cursor to end of line.
// Travis seems to have trouble when _not_ using "\r\x1b[0K", that will
// randomly put lines to the top of the webpage.
print!("travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", name);
print!(
"travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K",
name
);
OutputFolder {
name,
start_time: SystemTime::now(),
@@ -300,7 +319,7 @@ impl Drop for OutputFolder {
let finish = end_time.duration_since(UNIX_EPOCH);
println!(
"travis_fold:end:{0}\r\x1b[0K\n\
travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K",
travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K",
self.name,
to_nanos(start),
to_nanos(finish),
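util.rs also owns the dynamic-library path helpers referenced above ("Adds a list of lookup paths to `cmd`'s dynamic library lookup path"). A small sketch of that mechanism: pick the platform's variable, prepend our paths, and set it on the Command. The real dylib_path_var special-cases a few more platforms than shown here.

use std::env;
use std::path::PathBuf;
use std::process::Command;

// Name of the dynamic-library search path variable for the current platform.
fn dylib_path_var() -> &'static str {
    if cfg!(target_os = "windows") {
        "PATH"
    } else if cfg!(target_os = "macos") {
        "DYLD_LIBRARY_PATH"
    } else {
        "LD_LIBRARY_PATH"
    }
}

// Current value of that variable, split into individual paths.
fn dylib_path() -> Vec<PathBuf> {
    match env::var_os(dylib_path_var()) {
        Some(v) => env::split_paths(&v).collect(),
        None => Vec::new(),
    }
}

// Prepend `paths` to the search path and set the result on `cmd`.
fn add_lib_path(paths: Vec<PathBuf>, cmd: &mut Command) {
    let mut list = dylib_path();
    for path in paths {
        list.insert(0, path);
    }
    cmd.env(dylib_path_var(), env::join_paths(list).unwrap());
}

fn main() {
    let mut cmd = Command::new("rustc");
    add_lib_path(vec![PathBuf::from("build/lib")], &mut cmd);
    println!("{:?}", cmd);
}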
--- file diff (path not shown) ---
@@ -1,9 +1,9 @@
use std::fs::File;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::thread;
use std::time::{SystemTime, UNIX_EPOCH};
use std::{env, fs};
use std::thread;

/// A helper macro to `unwrap` a result except also print out details like:
///
@@ -92,8 +92,11 @@ pub fn gnu_target(target: &str) -> &str {
}

pub fn make(host: &str) -> PathBuf {
if host.contains("bitrig") || host.contains("dragonfly") || host.contains("freebsd")
|| host.contains("netbsd") || host.contains("openbsd")
if host.contains("bitrig")
|| host.contains("dragonfly")
|| host.contains("freebsd")
|| host.contains("netbsd")
|| host.contains("openbsd")
{
PathBuf::from("gmake")
} else {
@@ -120,7 +123,8 @@ pub fn output(cmd: &mut Command) -> String {
}

pub fn rerun_if_changed_anything_in_dir(dir: &Path) {
let mut stack = dir.read_dir()
let mut stack = dir
.read_dir()
.unwrap()
.map(|e| e.unwrap())
.filter(|e| &*e.file_name() != ".git")
@@ -178,7 +182,7 @@ impl NativeLibBoilerplate {
/// ensure it's linked against correctly.
pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) {
if env::var("TARGET").unwrap() != "x86_64-apple-darwin" {
return
return;
}

let dir = self.out_dir.join("build/lib/darwin");
@@ -221,8 +225,8 @@ pub fn native_lib_boilerplate(
) -> Result<NativeLibBoilerplate, ()> {
rerun_if_changed_anything_in_dir(src_dir);

let out_dir = env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or_else(||
env::var_os("OUT_DIR").unwrap());
let out_dir =
env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or_else(|| env::var_os("OUT_DIR").unwrap());
let out_dir = PathBuf::from(out_dir).join(out_name);
t!(fs::create_dir_all(&out_dir));
if link_name.contains('=') {
@@ -246,9 +250,9 @@ pub fn native_lib_boilerplate(
}
}

pub fn sanitizer_lib_boilerplate(sanitizer_name: &str)
-> Result<(NativeLibBoilerplate, String), ()>
{
pub fn sanitizer_lib_boilerplate(
sanitizer_name: &str,
) -> Result<(NativeLibBoilerplate, String), ()> {
let (link_name, search_path, apple) = match &*env::var("TARGET").unwrap() {
"x86_64-unknown-linux-gnu" => (
format!("clang_rt.{}-x86_64", sanitizer_name),
@@ -270,12 +274,7 @@ pub fn sanitizer_lib_boilerplate(sanitizer_name: &str)
// The source for `compiler-rt` comes from the `compiler-builtins` crate, so
// load our env var set by cargo to find the source code.
let dir = env::var_os("DEP_COMPILER_RT_COMPILER_RT").unwrap();
let lib = native_lib_boilerplate(
dir.as_ref(),
sanitizer_name,
&to_link,
search_path,
)?;
let lib = native_lib_boilerplate(dir.as_ref(), sanitizer_name, &to_link, search_path)?;
Ok((lib, link_name))
}
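Many call sites in this PR go through build_helper's output() helper. A minimal sketch of what such a helper does: run the command, assert it succeeded, and hand back stdout as a String (the error messages here are illustrative, not the real ones):

use std::process::Command;

fn output(cmd: &mut Command) -> String {
    // Spawn the command and capture its output, panicking if it cannot run.
    let out = match cmd.output() {
        Ok(out) => out,
        Err(e) => panic!("failed to execute {:?}: {}", cmd, e),
    };
    assert!(
        out.status.success(),
        "command {:?} exited with {}",
        cmd,
        out.status
    );
    String::from_utf8(out.stdout).expect("stdout was not valid utf-8")
}

fn main() {
    println!("{}", output(Command::new("rustc").arg("--version")).trim());
}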

--- file diff (path not shown) ---
@@ -21,10 +21,7 @@ extern "Rust" {
#[rustc_allocator_nounwind]
fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
#[rustc_allocator_nounwind]
fn __rust_realloc(ptr: *mut u8,
old_size: usize,
align: usize,
new_size: usize) -> *mut u8;
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_nounwind]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
@@ -154,12 +151,12 @@ unsafe impl Alloc for Global {
}

#[inline]
unsafe fn realloc(&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize)
-> Result<NonNull<u8>, AllocErr>
{
unsafe fn realloc(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
}

@@ -228,14 +225,15 @@ pub fn handle_alloc_error(layout: Layout) -> ! {
mod tests {
extern crate test;
use self::test::Bencher;
use alloc::{handle_alloc_error, Alloc, Global, Layout};
use boxed::Box;
use alloc::{Global, Alloc, Layout, handle_alloc_error};

#[test]
fn allocate_zeroed() {
unsafe {
let layout = Layout::from_size_align(1024, 1).unwrap();
let ptr = Global.alloc_zeroed(layout.clone())
let ptr = Global
.alloc_zeroed(layout.clone())
.unwrap_or_else(|_| handle_alloc_error(layout));

let mut i = ptr.cast::<u8>().as_ptr();
--- file diff (path not shown) ---
@@ -1,11 +1,11 @@
use rand::{seq::SliceRandom, thread_rng, Rng};
use std::collections::BTreeMap;
use std::iter::Iterator;
use std::vec::Vec;
use std::collections::BTreeMap;
use rand::{Rng, seq::SliceRandom, thread_rng};
use test::{Bencher, black_box};
use test::{black_box, Bencher};

macro_rules! map_insert_rand_bench {
($name: ident, $n: expr, $map: ident) => (
($name: ident, $n: expr, $map: ident) => {
#[bench]
pub fn $name(b: &mut Bencher) {
let n: usize = $n;
@@ -26,11 +26,11 @@ macro_rules! map_insert_rand_bench {
});
black_box(map);
}
)
};
}

macro_rules! map_insert_seq_bench {
($name: ident, $n: expr, $map: ident) => (
($name: ident, $n: expr, $map: ident) => {
#[bench]
pub fn $name(b: &mut Bencher) {
let mut map = $map::new();
@@ -49,11 +49,11 @@ macro_rules! map_insert_seq_bench {
});
black_box(map);
}
)
};
}

macro_rules! map_find_rand_bench {
($name: ident, $n: expr, $map: ident) => (
($name: ident, $n: expr, $map: ident) => {
#[bench]
pub fn $name(b: &mut Bencher) {
let mut map = $map::new();
@@ -77,11 +77,11 @@ macro_rules! map_find_rand_bench {
black_box(t);
})
}
)
};
}

macro_rules! map_find_seq_bench {
($name: ident, $n: expr, $map: ident) => (
($name: ident, $n: expr, $map: ident) => {
#[bench]
pub fn $name(b: &mut Bencher) {
let mut map = $map::new();
@@ -100,20 +100,20 @@ macro_rules! map_find_seq_bench {
black_box(x);
})
}
)
};
}

map_insert_rand_bench!{insert_rand_100, 100, BTreeMap}
map_insert_rand_bench!{insert_rand_10_000, 10_000, BTreeMap}
map_insert_rand_bench! {insert_rand_100, 100, BTreeMap}
map_insert_rand_bench! {insert_rand_10_000, 10_000, BTreeMap}

map_insert_seq_bench!{insert_seq_100, 100, BTreeMap}
map_insert_seq_bench!{insert_seq_10_000, 10_000, BTreeMap}
map_insert_seq_bench! {insert_seq_100, 100, BTreeMap}
map_insert_seq_bench! {insert_seq_10_000, 10_000, BTreeMap}

map_find_rand_bench!{find_rand_100, 100, BTreeMap}
map_find_rand_bench!{find_rand_10_000, 10_000, BTreeMap}
map_find_rand_bench! {find_rand_100, 100, BTreeMap}
map_find_rand_bench! {find_rand_10_000, 10_000, BTreeMap}

map_find_seq_bench!{find_seq_100, 100, BTreeMap}
map_find_seq_bench!{find_seq_10_000, 10_000, BTreeMap}
map_find_seq_bench! {find_seq_100, 100, BTreeMap}
map_find_seq_bench! {find_seq_10_000, 10_000, BTreeMap}

fn bench_iter(b: &mut Bencher, size: i32) {
let mut map = BTreeMap::<i32, i32>::new();
@@ -8,8 +8,8 @@ extern crate test;

mod btree;
mod linked_list;
mod string;
mod str;
mod slice;
mod str;
mod string;
mod vec;
mod vec_deque;
@@ -1,11 +1,11 @@
use rand::{thread_rng};
use rand::thread_rng;
use std::mem;
use std::ptr;

use rand::distributions::{Alphanumeric, Standard};
use rand::{Rng, SeedableRng};
use rand::distributions::{Standard, Alphanumeric};
use rand_xorshift::XorShiftRng;
use test::{Bencher, black_box};
use test::{black_box, Bencher};

#[bench]
fn iterator(b: &mut Bencher) {
@@ -230,7 +230,10 @@ fn gen_strings(len: usize) -> Vec<String> {

fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
let mut rng = XorShiftRng::from_seed(SEED);
rng.sample_iter(&Standard).map(|x| [x; 16]).take(len).collect()
rng.sample_iter(&Standard)
.map(|x| [x; 16])
.take(len)
.collect()
}

macro_rules! sort {
@@ -241,7 +244,7 @@ macro_rules! sort {
b.iter(|| v.clone().$f());
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
}
}
};
}

macro_rules! sort_strings {
@@ -253,7 +256,7 @@ macro_rules! sort_strings {
b.iter(|| v.clone().$f());
b.bytes = $len * mem::size_of::<&str>() as u64;
}
}
};
}

macro_rules! sort_expensive {
@@ -275,7 +278,7 @@ macro_rules! sort_expensive {
});
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
}
}
};
}

macro_rules! sort_lexicographic {
@@ -286,7 +289,7 @@ macro_rules! sort_lexicographic {
b.iter(|| v.clone().$f(|x| x.to_string()));
b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
}
}
};
}

sort!(sort, sort_small_ascending, gen_ascending, 10);
@@ -296,88 +299,205 @@ sort!(sort, sort_small_big, gen_big_random, 10);
sort!(sort, sort_medium_random, gen_random, 100);
sort!(sort, sort_large_ascending, gen_ascending, 10000);
sort!(sort, sort_large_descending, gen_descending, 10000);
sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
sort!(
sort,
sort_large_mostly_ascending,
gen_mostly_ascending,
10000
);
sort!(
sort,
sort_large_mostly_descending,
gen_mostly_descending,
10000
);
sort!(sort, sort_large_random, gen_random, 10000);
sort!(sort, sort_large_big, gen_big_random, 10000);
sort_strings!(sort, sort_large_strings, gen_strings, 10000);
sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000);

sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
sort!(
sort_unstable,
sort_unstable_small_ascending,
gen_ascending,
10
);
sort!(
sort_unstable,
sort_unstable_small_descending,
gen_descending,
10
);
sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10);
sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
sort!(
sort_unstable,
sort_unstable_large_ascending,
gen_ascending,
10000
);
sort!(
sort_unstable,
sort_unstable_large_descending,
gen_descending,
10000
);
sort!(
sort_unstable,
sort_unstable_large_mostly_ascending,
gen_mostly_ascending,
10000
);
sort!(
sort_unstable,
sort_unstable_large_mostly_descending,
gen_mostly_descending,
10000
);
sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000);
sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000);
sort!(
sort_unstable,
sort_unstable_large_big,
gen_big_random,
10000
);
sort_strings!(
sort_unstable,
sort_unstable_large_strings,
gen_strings,
10000
);
sort_expensive!(
sort_unstable_by,
sort_unstable_large_expensive,
gen_random,
10000
);

sort_lexicographic!(sort_by_key, sort_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_unstable_by_key, sort_unstable_by_key_lexicographic, gen_random, 10000);
sort_lexicographic!(sort_by_cached_key, sort_by_cached_key_lexicographic, gen_random, 10000);
sort_lexicographic!(
sort_unstable_by_key,
sort_unstable_by_key_lexicographic,
gen_random,
10000
);
sort_lexicographic!(
sort_by_cached_key,
sort_by_cached_key_lexicographic,
gen_random,
10000
);

macro_rules! reverse {
($name:ident, $ty:ty, $f:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
// odd length and offset by 1 to be as unaligned as possible
let n = 0xFFFFF;
let mut v: Vec<_> =
(0..1+(n / mem::size_of::<$ty>() as u64))
let mut v: Vec<_> = (0..1 + (n / mem::size_of::<$ty>() as u64))
.map($f)
.collect();
b.iter(|| black_box(&mut v[1..]).reverse());
b.bytes = n;
}
}
};
}

reverse!(reverse_u8, u8, |x| x as u8);
reverse!(reverse_u16, u16, |x| x as u16);
reverse!(reverse_u8x3, [u8;3], |x| [x as u8, (x>>8) as u8, (x>>16) as u8]);
reverse!(reverse_u8x3, [u8; 3], |x| [
x as u8,
(x >> 8) as u8,
(x >> 16) as u8
]);
reverse!(reverse_u32, u32, |x| x as u32);
reverse!(reverse_u64, u64, |x| x as u64);
reverse!(reverse_u128, u128, |x| x as u128);
#[repr(simd)] struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| { let x = x as f64; F64x4(x,x,x,x) });
#[repr(simd)]
struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| {
let x = x as f64;
F64x4(x, x, x, x)
});

macro_rules! rotate {
($name:ident, $gen:expr, $len:expr, $mid:expr) => {
#[bench]
fn $name(b: &mut Bencher) {
let size = mem::size_of_val(&$gen(1)[0]);
let mut v = $gen($len * 8 / size);
b.iter(|| black_box(&mut v).rotate_left(($mid*8+size-1)/size));
b.iter(|| black_box(&mut v).rotate_left(($mid * 8 + size - 1) / size));
b.bytes = (v.len() * size) as u64;
}
}
};
}

rotate!(rotate_tiny_by1, gen_random, 16, 1);
rotate!(rotate_tiny_half, gen_random, 16, 16/2);
rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16/2+1);
rotate!(rotate_tiny_half, gen_random, 16, 16 / 2);
rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16 / 2 + 1);

rotate!(rotate_medium_by1, gen_random, 9158, 1);
rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
rotate!(rotate_medium_half, gen_random, 9158, 9158/2);
rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158/2+1);
rotate!(rotate_medium_half, gen_random, 9158, 9158 / 2);
rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158 / 2 + 1);

// Intended to use more RAM than the machine has cache
rotate!(rotate_huge_by1, gen_random, 5*1024*1024, 1);
rotate!(rotate_huge_by9199_u64, gen_random, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_strings, gen_strings, 5*1024*1024, 9199);
rotate!(rotate_huge_by9199_big, gen_big_random, 5*1024*1024, 9199);
rotate!(rotate_huge_by1234577_u64, gen_random, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_strings, gen_strings, 5*1024*1024, 1234577);
rotate!(rotate_huge_by1234577_big, gen_big_random, 5*1024*1024, 1234577);
rotate!(rotate_huge_half, gen_random, 5*1024*1024, 5*1024*1024/2);
rotate!(rotate_huge_half_plus_one, gen_random, 5*1024*1024, 5*1024*1024/2+1);
rotate!(rotate_huge_by1, gen_random, 5 * 1024 * 1024, 1);
rotate!(rotate_huge_by9199_u64, gen_random, 5 * 1024 * 1024, 9199);
rotate!(
rotate_huge_by9199_bytes,
gen_random_bytes,
5 * 1024 * 1024,
9199
);
rotate!(
rotate_huge_by9199_strings,
gen_strings,
5 * 1024 * 1024,
9199
);
rotate!(
rotate_huge_by9199_big,
gen_big_random,
5 * 1024 * 1024,
9199
);
rotate!(
rotate_huge_by1234577_u64,
gen_random,
5 * 1024 * 1024,
1234577
);
rotate!(
rotate_huge_by1234577_bytes,
gen_random_bytes,
5 * 1024 * 1024,
1234577
);
rotate!(
rotate_huge_by1234577_strings,
gen_strings,
5 * 1024 * 1024,
1234577
);
rotate!(
rotate_huge_by1234577_big,
gen_big_random,
5 * 1024 * 1024,
1234577
);
rotate!(
rotate_huge_half,
gen_random,
5 * 1024 * 1024,
5 * 1024 * 1024 / 2
);
rotate!(
rotate_huge_half_plus_one,
gen_random,
5 * 1024 * 1024,
5 * 1024 * 1024 / 2 + 1
);
@@ -1,4 +1,4 @@
use test::{Bencher, black_box};
use test::{black_box, Bencher};

#[bench]
fn char_iterator(b: &mut Bencher) {
@@ -12,7 +12,9 @@ fn char_iterator_for(b: &mut Bencher) {
let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";

b.iter(|| {
for ch in s.chars() { black_box(ch); }
for ch in s.chars() {
black_box(ch);
}
});
}

@@ -40,7 +42,9 @@ fn char_iterator_rev_for(b: &mut Bencher) {
let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";

b.iter(|| {
for ch in s.chars().rev() { black_box(ch); }
for ch in s.chars().rev() {
black_box(ch);
}
});
}

@@ -62,7 +66,8 @@ fn char_indicesator_rev(b: &mut Bencher) {

#[bench]
fn split_unicode_ascii(b: &mut Bencher) {
let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam";
let s =
"ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam";

b.iter(|| assert_eq!(s.split('V').count(), 3));
}
@@ -79,7 +84,9 @@ fn split_ascii(b: &mut Bencher) {
fn split_extern_fn(b: &mut Bencher) {
let s = "Mary had a little lamb, Little lamb, little-lamb.";
let len = s.split(' ').count();
fn pred(c: char) -> bool { c == ' ' }
fn pred(c: char) -> bool {
c == ' '
}

b.iter(|| assert_eq!(s.split(pred).count(), len));
}
@@ -185,16 +192,19 @@ fn bench_contains_equal(b: &mut Bencher) {
})
}


macro_rules! make_test_inner {
($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => {
#[bench]
fn $name(bencher: &mut Bencher) {
let mut $s = $str;
black_box(&mut $s);
bencher.iter(|| for _ in 0..$iters { black_box($code); });
bencher.iter(|| {
for _ in 0..$iters {
black_box($code);
}
});
}
}
};
}

macro_rules! make_test {
@@ -283,11 +293,25 @@ make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024);
make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024);
make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024);
make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024);
make_test!(starts_with_str, s, s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(ends_with_str, s, s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
make_test!(
starts_with_str,
s,
s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"),
1024
);
make_test!(
ends_with_str,
s,
s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"),
1024
);

make_test!(split_space_char, s, s.split(' ').count());
make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
make_test!(
split_terminator_space_char,
s,
s.split_terminator(' ').count()
);

make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
@@ -1,5 +1,5 @@
use std::iter::{repeat, FromIterator};
use test::Bencher;
use std::iter::{FromIterator, repeat};

#[bench]
fn bench_new(b: &mut Bencher) {
@@ -1,5 +1,5 @@
use std::collections::VecDeque;
use test::{Bencher, black_box};
use test::{black_box, Bencher};

#[bench]
fn bench_new(b: &mut Bencher) {
@@ -16,8 +16,9 @@ pub use core::borrow::{Borrow, BorrowMut};

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
where B: ToOwned,
<B as ToOwned>::Owned: 'a
where
B: ToOwned,
<B as ToOwned>::Owned: 'a,
{
fn borrow(&self) -> &B {
&**self
@@ -68,17 +69,20 @@ pub trait ToOwned {
/// let mut v: Vec<i32> = Vec::new();
/// [1, 2][..].clone_into(&mut v);
/// ```
#[unstable(feature = "toowned_clone_into",
reason = "recently added",
issue = "41263")]
#[unstable(
feature = "toowned_clone_into",
reason = "recently added",
issue = "41263"
)]
fn clone_into(&self, target: &mut Self::Owned) {
*target = self.to_owned();
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ToOwned for T
where T: Clone
where
T: Clone,
{
type Owned = T;
fn to_owned(&self) -> T {
@@ -168,22 +172,22 @@ impl<T> ToOwned for T
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Cow<'a, B: ?Sized + 'a>
where B: ToOwned
where
B: ToOwned,
{
/// Borrowed data.
#[stable(feature = "rust1", since = "1.0.0")]
Borrowed(#[stable(feature = "rust1", since = "1.0.0")]
&'a B),
Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),

/// Owned data.
#[stable(feature = "rust1", since = "1.0.0")]
Owned(#[stable(feature = "rust1", since = "1.0.0")]
<B as ToOwned>::Owned),
Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Clone for Cow<'a, B>
where B: ToOwned
where
B: ToOwned,
{
fn clone(&self) -> Cow<'a, B> {
match *self {
@@ -208,7 +212,8 @@ impl<'a, B: ?Sized> Clone for Cow<'a, B>
}

impl<'a, B: ?Sized> Cow<'a, B>
where B: ToOwned
where
B: ToOwned,
{
/// Acquires a mutable reference to the owned form of the data.
///
@@ -286,7 +291,8 @@ impl<'a, B: ?Sized> Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Deref for Cow<'a, B>
where B: ToOwned
where
B: ToOwned,
{
type Target = B;

@@ -303,7 +309,8 @@ impl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Ord for Cow<'a, B>
where B: Ord + ToOwned
where
B: Ord + ToOwned,
{
#[inline]
fn cmp(&self, other: &Cow<'a, B>) -> Ordering {
@@ -313,8 +320,9 @@ impl<'a, B: ?Sized> Ord for Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
where B: PartialEq<C> + ToOwned,
C: ToOwned
where
B: PartialEq<C> + ToOwned,
C: ToOwned,
{
#[inline]
fn eq(&self, other: &Cow<'b, C>) -> bool {
@@ -324,7 +332,8 @@ impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
where B: PartialOrd + ToOwned
where
B: PartialOrd + ToOwned,
{
#[inline]
fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
@@ -334,8 +343,9 @@ impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> fmt::Debug for Cow<'a, B>
where B: fmt::Debug + ToOwned,
<B as ToOwned>::Owned: fmt::Debug
where
B: fmt::Debug + ToOwned,
<B as ToOwned>::Owned: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
@@ -347,8 +357,9 @@ impl<'a, B: ?Sized> fmt::Debug for Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> fmt::Display for Cow<'a, B>
where B: fmt::Display + ToOwned,
<B as ToOwned>::Owned: fmt::Display
where
B: fmt::Display + ToOwned,
<B as ToOwned>::Owned: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
@@ -360,8 +371,9 @@ impl<'a, B: ?Sized> fmt::Display for Cow<'a, B>

#[stable(feature = "default", since = "1.11.0")]
impl<'a, B: ?Sized> Default for Cow<'a, B>
where B: ToOwned,
<B as ToOwned>::Owned: Default
where
B: ToOwned,
<B as ToOwned>::Owned: Default,
{
/// Creates an owned Cow<'a, B> with the default value for the contained owned value.
fn default() -> Cow<'a, B> {
@@ -371,7 +383,8 @@ impl<'a, B: ?Sized> Default for Cow<'a, B>

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Hash for Cow<'a, B>
where B: Hash + ToOwned
where
B: Hash + ToOwned,
{
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
@@ -63,19 +63,19 @@ use core::convert::From;
use core::fmt;
use core::future::Future;
use core::hash::{Hash, Hasher};
use core::iter::{Iterator, FromIterator, FusedIterator};
use core::iter::{FromIterator, FusedIterator, Iterator};
use core::marker::{Unpin, Unsize};
use core::mem;
use core::pin::Pin;
use core::ops::{
CoerceUnsized, DispatchFromDyn, Deref, DerefMut, Receiver, Generator, GeneratorState
CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
};
use core::pin::Pin;
use core::ptr::{self, NonNull, Unique};
use core::task::{LocalWaker, Poll};

use vec::Vec;
use raw_vec::RawVec;
use str::from_boxed_utf8_unchecked;
use vec::Vec;

/// A pointer type for heap allocation.
///
@@ -199,7 +199,11 @@ impl<T: ?Sized> Box<T> {
Box::into_unique(b).into()
}

#[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")]
#[unstable(
feature = "ptr_internals",
issue = "0",
reason = "use into_raw_non_null instead"
)]
#[inline]
#[doc(hidden)]
pub fn into_unique(b: Box<T>) -> Unique<T> {
@@ -253,7 +257,7 @@ impl<T: ?Sized> Box<T> {
#[inline]
pub fn leak<'a>(b: Box<T>) -> &'a mut T
where
T: 'a // Technically not needed, but kept to be explicit.
T: 'a, // Technically not needed, but kept to be explicit.
{
unsafe { &mut *Box::into_raw(b) }
}
@@ -321,7 +325,6 @@ impl<T: Clone> Clone for Box<T> {
}
}


#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl Clone for Box<str> {
fn clone(&self) -> Self {
@@ -666,7 +669,6 @@ impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}


/// `FnBox` is a version of the `FnOnce` intended for use with boxed
/// closure objects. The idea is that where one would normally store a
/// `Box<dyn FnOnce()>` in a data structure, you should use
@@ -705,18 +707,25 @@ impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
/// }
/// ```
#[rustc_paren_sugar]
#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(
feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable",
issue = "28796"
)]
pub trait FnBox<A> {
type Output;

fn call_box(self: Box<Self>, args: A) -> Self::Output;
}

#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(
feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable",
issue = "28796"
)]
impl<A, F> FnBox<A> for F
where F: FnOnce<A>
where
F: FnOnce<A>,
{
type Output = F::Output;

@@ -725,8 +734,11 @@ impl<A, F> FnBox<A> for F
}
}

#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(
feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable",
issue = "28796"
)]
impl<'a, A, R> FnOnce<A> for Box<dyn FnBox<A, Output = R> + 'a> {
type Output = R;

@@ -735,8 +747,11 @@ impl<'a, A, R> FnOnce<A> for Box<dyn FnBox<A, Output = R> + 'a> {
}
}

#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(
feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable",
issue = "28796"
)]
impl<'a, A, R> FnOnce<A> for Box<dyn FnBox<A, Output = R> + Send + 'a> {
type Output = R;

@@ -860,11 +875,12 @@ impl<T: ?Sized> AsMut<T> for Box<T> {
* could have a method to project a Pin<T> from it.
*/
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Box<T> { }
impl<T: ?Sized> Unpin for Box<T> {}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<T> Generator for Box<T>
where T: Generator + ?Sized
where
T: Generator + ?Sized,
{
type Yield = T::Yield;
type Return = T::Return;
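
The `FnBox` doc comment above is cut off at the hunk boundary; the idea it describes is that a boxed one-shot closure stored as `Box<dyn FnBox()>` can still be called by value, which `Box<dyn FnOnce()>` could not be at the time. A minimal usage sketch under that reading, using the nightly-only `fnbox` feature gated in the attributes shown above:

// Sketch of the pattern the FnBox docs describe: store boxed one-shot
// closures in a collection, then consume each one by calling through the box.
#![feature(fnbox)]

use std::boxed::FnBox;

fn main() {
    let jobs: Vec<Box<dyn FnBox() -> i32>> = vec![Box::new(|| 1), Box::new(|| 41)];
    // The `FnOnce` impl for `Box<dyn FnBox<_>>` shown in the diff makes `job()` legal.
    let total: i32 = jobs.into_iter().map(|job| job()).sum();
    assert_eq!(total, 42);
}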
@@ -1,11 +1,11 @@
//! Test for `boxed` mod.

use core::any::Any;
use core::ops::Deref;
use core::result::Result::{Err, Ok};
use core::clone::Clone;
use core::f64;
use core::i64;
use core::ops::Deref;
use core::result::Result::{Err, Ok};

use std::boxed::Box;
