Cosmetic improvements to comments #58524

Closed · wants to merge 4 commits
2 changes: 1 addition & 1 deletion src/bootstrap/bin/rustc.rs
@@ -149,7 +149,7 @@ fn main() {
//
// `compiler_builtins` are unconditionally compiled with panic=abort to
// workaround undefined references to `rust_eh_unwind_resume` generated
// otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
// otherwise, see issue #43095.
if crate_name == "panic_abort" ||
crate_name == "compiler_builtins" && stage != "0" {
cmd.arg("-C").arg("panic=abort");
39 changes: 20 additions & 19 deletions src/bootstrap/builder.rs
@@ -80,7 +80,7 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
// It is reasonable to not have an implementation of make_run for rules
// who do not want to get called from the root context. This means that
// they are likely dependencies (e.g., sysroot creation) or similar, and
// as such calling them from ./x.py isn't logical.
// as such calling them from `./x.py` isn't logical.
unimplemented!()
}
}
@@ -236,11 +236,11 @@ impl StepDescription {
#[derive(Clone)]
pub struct ShouldRun<'a> {
pub builder: &'a Builder<'a>,
// use a BTreeSet to maintain sort order
// Use a `BTreeSet` to maintain sort order.
paths: BTreeSet<PathSet>,

// If this is a default rule, this is an additional constraint placed on
// its run. Generally something like compiler docs being enabled.
// its run (generally something like compiler docs being enabled).
is_really_default: bool,
}

@@ -249,7 +249,8 @@ impl<'a> ShouldRun<'a> {
ShouldRun {
builder,
paths: BTreeSet::new(),
is_really_default: true, // by default no additional conditions
// By default no additional conditions.
is_really_default: true,
}
}

@@ -277,12 +278,12 @@ impl<'a> ShouldRun<'a> {
self
}

// single, non-aliased path
// Single, non-aliased path.
pub fn path(self, path: &str) -> Self {
self.paths(&[path])
}

// multiple aliases for the same job
// Multiple aliases for the same job.
pub fn paths(mut self, paths: &[&str]) -> Self {
self.paths
.insert(PathSet::Set(paths.iter().map(PathBuf::from).collect()));
@@ -301,7 +302,7 @@ impl<'a> ShouldRun<'a> {
self
}

// allows being more explicit about why should_run in Step returns the value passed to it
// Allows being more explicit about why `Step::should_run` returns the value passed to it.
pub fn never(mut self) -> ShouldRun<'a> {
self.paths.insert(PathSet::empty());
self
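To make the `path`/`paths`/`never` registration pattern above easier to follow in isolation, here is a hedged, self-contained mock of `ShouldRun` (the real type in `src/bootstrap/builder.rs` also carries a `Builder` reference, the `is_really_default` flag, and a `Suite` variant of `PathSet`; the paths used in `main` are made-up examples):

use std::collections::BTreeSet;
use std::path::PathBuf;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum PathSet {
    Set(BTreeSet<PathBuf>),
}

impl PathSet {
    fn empty() -> PathSet {
        PathSet::Set(BTreeSet::new())
    }
}

#[derive(Debug)]
struct ShouldRun {
    // Use a `BTreeSet` to maintain sort order.
    paths: BTreeSet<PathSet>,
}

impl ShouldRun {
    fn new() -> ShouldRun {
        ShouldRun { paths: BTreeSet::new() }
    }

    // Single, non-aliased path.
    fn path(self, path: &str) -> Self {
        self.paths(&[path])
    }

    // Multiple aliases for the same job: any of them selects the same step.
    fn paths(mut self, paths: &[&str]) -> Self {
        self.paths
            .insert(PathSet::Set(paths.iter().map(PathBuf::from).collect()));
        self
    }

    // An empty set: the step is never selected by a command-line path.
    fn never(mut self) -> Self {
        self.paths.insert(PathSet::empty());
        self
    }
}

fn main() {
    let docs = ShouldRun::new()
        .path("src/doc/book")
        .paths(&["src/libstd", "src/libcore", "src/liballoc"]);
    println!("{:?}", docs);

    let internal_only = ShouldRun::new().never();
    println!("{:?}", internal_only);
}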
@@ -677,7 +678,7 @@ impl<'a> Builder<'a> {
let compiler = self.compiler(self.top_stage, host);
cmd.env("RUSTC_STAGE", compiler.stage.to_string())
.env("RUSTC_SYSROOT", self.sysroot(compiler))
// Note that this is *not* the sysroot_libdir because rustdoc must be linked
// Note that this is *not* the `sysroot_libdir` because rustdoc must be linked
// equivalently to rustc.
.env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler))
.env("CFG_RELEASE_CHANNEL", &self.config.channel)
@@ -813,12 +814,12 @@ impl<'a> Builder<'a> {
}

cargo.arg("-j").arg(self.jobs().to_string());
// Remove make-related flags to ensure Cargo can correctly set things up
// Remove make-related flags to ensure Cargo can correctly set things up.
cargo.env_remove("MAKEFLAGS");
cargo.env_remove("MFLAGS");

// FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
// Force cargo to output binaries with disambiguating hashes in the name
// FIXME: temporary fix for rust-lang/cargo#3005.
// Force cargo to output binaries with disambiguating hashes in the name.
let metadata = if compiler.stage == 0 {
// Treat stage0 like a special channel, whether it's a normal prior-
// release rustc or a local rebuild with the same version, so we
@@ -863,7 +864,7 @@ impl<'a> Builder<'a> {
// "raw" compiler in that it's the exact snapshot we download. Normally
// the stage0 build means it uses libraries build by the stage0
// compiler, but for tools we just use the precompiled libraries that
// we've downloaded
// we've downloaded.
let use_snapshot = mode == Mode::ToolBootstrap;
assert!(!use_snapshot || stage == 0 || self.local_rebuild);

@@ -920,7 +921,7 @@ impl<'a> Builder<'a> {

if mode.is_tool() {
// Tools like cargo and rls don't get debuginfo by default right now, but this can be
// enabled in the config. Adding debuginfo makes them several times larger.
// enabled in the config. Adding debuginfo makes them several times larger.
if self.config.rust_debuginfo_tools {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
cargo.env(
@@ -1028,7 +1029,7 @@ impl<'a> Builder<'a> {
// Build scripts use either the `cc` crate or `configure/make` so we pass
// the options through environment variables that are fetched and understood by both.
//
// FIXME: the guard against msvc shouldn't need to be here
// FIXME: the guard against MSVC shouldn't need to be here.
if target.contains("msvc") {
if let Some(ref cl) = self.config.llvm_clang_cl {
cargo.env("CC", cl).env("CXX", cl);
@@ -1040,7 +1041,7 @@ impl<'a> Builder<'a> {
Some(ref s) => s,
None => return s.display().to_string(),
};
// FIXME: the cc-rs crate only recognizes the literal strings
// FIXME: the cc-rs crate only recognizes the literal strings.
// `ccache` and `sccache` when doing caching compilations, so we
// mirror that here. It should probably be fixed upstream to
// accept a new env var or otherwise work with custom ccache
@@ -1080,12 +1081,12 @@ impl<'a> Builder<'a> {
cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
}

// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs.
cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());

// Environment variables *required* throughout the build
// Environment variables *required* throughout the build.
//
// FIXME: should update code to not require this env var
// FIXME: should update code to not require this env var.
cargo.env("CFG_COMPILER_HOST_TRIPLE", target);

// Set this for all builds to make sure doc builds also get it.
@@ -1476,7 +1477,7 @@ mod __test {
#[test]
fn dist_with_target_flag() {
let mut config = configure(&["B"], &["C"]);
config.run_host_only = false; // as-if --target=C was passed
config.run_host_only = false; // as if `--target=C` were passed
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
3 changes: 2 additions & 1 deletion src/bootstrap/cache.rs
@@ -235,7 +235,8 @@ lazy_static! {
pub struct Cache(
RefCell<HashMap<
TypeId,
Box<dyn Any>, // actually a HashMap<Step, Interned<Step::Output>>
// Actually a `HashMap<Step, Interned<Step::Output>>`.
Box<dyn Any>,
>>
);
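The comment above records an invariant the types cannot express: the `Box<dyn Any>` stored under a step's `TypeId` is really a `HashMap<Step, Interned<Step::Output>>`. A minimal, self-contained sketch of that type-erasure pattern follows; `put`/`get` and the keys used in `main` are illustrative, not the bootstrap's actual API:

use std::any::{Any, TypeId};
use std::cell::RefCell;
use std::collections::HashMap;
use std::hash::Hash;

// Simplified stand-in for the bootstrap `Cache`: values are type-erased, but
// every entry stored under `TypeId::of::<K>()` is a `HashMap<K, V>`.
struct Cache(RefCell<HashMap<TypeId, Box<dyn Any>>>);

impl Cache {
    fn put<K, V>(&self, key: K, value: V)
    where
        K: Any + Eq + Hash,
        V: Any,
    {
        let mut outer = self.0.borrow_mut();
        let slot = outer.entry(TypeId::of::<K>()).or_insert_with(|| {
            let empty: Box<dyn Any> = Box::new(HashMap::<K, V>::new());
            empty
        });
        // In bootstrap, `V` is determined by `K` (a step's `Output` type), so
        // the downcast cannot fail; in this generic sketch it would panic if
        // the same `K` were reused with a different `V`.
        let inner = slot
            .downcast_mut::<HashMap<K, V>>()
            .expect("one value type per key type");
        inner.insert(key, value);
    }

    fn get<K, V>(&self, key: &K) -> Option<V>
    where
        K: Any + Eq + Hash,
        V: Any + Clone,
    {
        let outer = self.0.borrow();
        outer
            .get(&TypeId::of::<K>())
            .and_then(|boxed| boxed.downcast_ref::<HashMap<K, V>>())
            .and_then(|map| map.get(key).cloned())
    }
}

fn main() {
    let cache = Cache(RefCell::new(HashMap::new()));
    cache.put("stage1-std".to_string(), 42u32);
    assert_eq!(cache.get::<String, u32>(&"stage1-std".to_string()), Some(42));
}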

42 changes: 21 additions & 21 deletions src/bootstrap/compile.rs
@@ -165,12 -165,12 @@ pub fn std_cargo(builder: &Builder,
let features = builder.std_features();

if compiler.stage != 0 && builder.config.sanitizers {
// This variable is used by the sanitizer runtime crates, e.g.
// rustc_lsan, to build the sanitizer runtime from C code
// This variable is used by the sanitizer runtime crates, e.g.,
// `rustc_lsan`, to build the sanitizer runtime from C code
// When this variable is missing, those crates won't compile the C code,
// so we don't set this variable during stage0 where llvm-config is
// missing
// We also only build the runtimes when --enable-sanitizers (or its
// We also only build the runtimes when `--enable-sanitizers` (or its
// config.toml equivalent) is used
let llvm_config = builder.ensure(native::Llvm {
target: builder.config.build,
@@ -895,7 +895,7 @@ impl Step for Assemble {
run.never()
}

/// Prepare a new compiler from the artifacts in `stage`
/// Prepares a new compiler from the artifacts in `stage`
///
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
/// must have been previously produced by the `stage - 1` builder.build
@@ -921,17 +921,17 @@ impl Step for Assemble {
// produce some other architecture compiler we need to start from
// `build` to get there.
//
// FIXME: Perhaps we should download those libraries?
// It would make builds faster...
// FIXME: perhaps we should download those libraries?
// It would certainly make builds faster.
//
// FIXME: It may be faster if we build just a stage 1 compiler and then
// use that to bootstrap this compiler forward.
// FIXME: it may be faster if we build just a stage 1 compiler and then
// use that to bootstrap this compiler forward.
let build_compiler =
builder.compiler(target_compiler.stage - 1, builder.config.build);

// Build the libraries for this compiler to link to (i.e., the libraries
// it uses at runtime). NOTE: Crates the target compiler compiles don't
// link to these. (FIXME: Is that correct? It seems to be correct most
// link to these. (FIXME: is that correct? It seems to be correct most
// of the time but I think we do link to these for stage2/bin compilers
// when not performing a full bootstrap).
builder.ensure(Rustc {
@@ -958,7 +958,7 @@ impl Step for Assemble {
let host = target_compiler.host;
builder.info(&format!("Assembling stage{} compiler ({})", stage, host));

// Link in all dylibs to the libdir
// Link in all dylibs to the libdir.
let sysroot = builder.sysroot(target_compiler);
let sysroot_libdir = sysroot.join(libdir(&*host));
t!(fs::create_dir_all(&sysroot_libdir));
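As a rough, self-contained illustration of the "link in all dylibs to the libdir" step above (the bootstrap hard-links via its own helpers and computes the real paths from the `Builder`; the extension check mirrors `is_dylib` in simplified form, and the paths in `main` are placeholders):

use std::fs;
use std::io;
use std::path::Path;

// Simplified version of the bootstrap's `is_dylib` check.
fn is_dylib(name: &str) -> bool {
    name.ends_with(".so") || name.ends_with(".dylib") || name.ends_with(".dll")
}

// Copy every dynamic library from a build output dir into the new sysroot libdir.
fn copy_dylibs(build_libdir: &Path, sysroot_libdir: &Path) -> io::Result<()> {
    fs::create_dir_all(sysroot_libdir)?;
    for entry in fs::read_dir(build_libdir)? {
        let entry = entry?;
        let name = entry.file_name();
        if is_dylib(&name.to_string_lossy()) {
            fs::copy(entry.path(), sysroot_libdir.join(&name))?;
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    // Placeholder paths; the real stage/target layout comes from the Builder.
    copy_dylibs(
        Path::new("build/host/stage1-rustc/release"),
        Path::new("build/host/stage2/lib"),
    )
}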
@@ -979,7 +979,7 @@ impl Step for Assemble {

dist::maybe_install_llvm_dylib(builder, target_compiler.host, &sysroot);

// Link the compiler binary itself into place
// Link the compiler binary itself into place.
let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host);
let rustc = out_dir.join(exe("rustc_binary", &*host));
let bindir = sysroot.join("bin");
@@ -992,7 +992,7 @@ impl Step for Assemble {
}
}

/// Link some files into a rustc sysroot.
/// Links some files into a rustc sysroot.
///
/// For a particular stage this will link the file listed in `stamp` into the
/// `sysroot_dst` provided.
@@ -1013,16 +1013,16 @@ pub fn run_cargo(builder: &Builder,
return Vec::new();
}

// `target_root_dir` looks like $dir/$target/release
// `target_root_dir` looks like `$dir/$target/release`.
let target_root_dir = stamp.parent().unwrap();
// `target_deps_dir` looks like $dir/$target/release/deps
// `target_deps_dir` looks like `$dir/$target/release/deps`.
let target_deps_dir = target_root_dir.join("deps");
// `host_root_dir` looks like $dir/release
// `host_root_dir` looks like `$dir/release`.
let host_root_dir = target_root_dir.parent().unwrap() // chop off `release`
.parent().unwrap() // chop off `$target`
.join(target_root_dir.file_name().unwrap());

// Spawn Cargo slurping up its JSON output. We'll start building up the
// Spawn Cargo, collecting its JSON output. We'll start building up the
// `deps` array of all files it generated along with a `toplevel` array of
// files we need to probe for later.
let mut deps = Vec::new();
@@ -1033,7 +1033,7 @@ pub fn run_cargo(builder: &Builder,
_ => return,
};
for filename in filenames {
// Skip files like executables
// Skip files like executables.
if !filename.ends_with(".rlib") &&
!filename.ends_with(".lib") &&
!is_dylib(&filename) &&
@@ -1056,7 +1056,7 @@ pub fn run_cargo(builder: &Builder,
continue;
}

// Otherwise this was a "top level artifact" which right now doesn't
// Otherwise, this was a "top level artifact" which right now doesn't
// have a hash in the name, but there's a version of this file in
// the `deps` folder which *does* have a hash in the name. That's
// the one we'll want, so we'll probe for it later.
@@ -1080,7 +1080,7 @@ pub fn run_cargo(builder: &Builder,
exit(1);
}

// Ok now we need to actually find all the files listed in `toplevel`. We've
// Now we need to actually find all the files listed in `toplevel`. We've
// got a list of prefix/extensions and we basically just need to find the
// most recent file in the `deps` folder corresponding to each one.
let contents = t!(target_deps_dir.read_dir())
@@ -1168,15 +1168,15 @@ pub fn stream_cargo(
Err(e) => panic!("failed to execute command: {:?}\nerror: {}", cargo, e),
};

// Spawn Cargo slurping up its JSON output. We'll start building up the
// Spawn Cargo, collecting its JSON output. We'll start building up the
// `deps` array of all files it generated along with a `toplevel` array of
// files we need to probe for later.
let stdout = BufReader::new(child.stdout.take().unwrap());
for line in stdout.lines() {
let line = t!(line);
match serde_json::from_str::<CargoMessage>(&line) {
Ok(msg) => cb(msg),
// If this was informational, just print it out and continue
// If this was informational, just print it out and continue.
Err(_) => println!("{}", line)
}
}
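Both `run_cargo` and `stream_cargo` rely on the same "spawn Cargo, collecting its JSON output" pattern. A self-contained sketch follows; unlike the real code it does not deserialize each line into a `CargoMessage` with `serde_json`, and the substring check for `compiler-artifact` messages is a deliberate simplification:

use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Ask Cargo to emit one JSON object per line on stdout.
    let mut child = Command::new("cargo")
        .args(&["build", "--message-format=json"])
        .stdout(Stdio::piped())
        .spawn()?;

    let stdout = BufReader::new(child.stdout.take().unwrap());
    for line in stdout.lines() {
        let line = line?;
        if line.contains("\"reason\":\"compiler-artifact\"") {
            // Artifact messages carry the `filenames` array the bootstrap probes.
            println!("artifact: {}", line);
        } else {
            // If this was informational, just print it out and continue.
            println!("{}", line);
        }
    }

    let status = child.wait()?;
    println!("cargo exited with: {}", status);
    Ok(())
}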
4 changes: 2 additions & 2 deletions src/bootstrap/dist.rs
@@ -2222,8 +2222,8 @@ impl Step for Lldb {
}
}

// The lldb scripts might be installed in lib/python$version
// or in lib64/python$version. If lib64 exists, use it;
// The lldb scripts might be installed in `lib/python$version`
// or in `lib64/python$version`. If lib64 exists, use it;
// otherwise lib.
let libdir = builder.llvm_out(target).join("lib64");
let (libdir, libdir_name) = if libdir.exists() {
14 changes: 7 additions & 7 deletions src/bootstrap/doc.rs
@@ -567,14 +567,14 @@ impl Step for Test {
compiler
};

// Build libstd docs so that we generate relative links
// Build libstd docs so that we generate relative links.
builder.ensure(Std { stage, target });

builder.ensure(compile::Test { compiler, target });
let out_dir = builder.stage_out(compiler, Mode::Test)
.join(target).join("doc");

// See docs in std above for why we symlink
// See docs in std above for why we symlink.
let my_out = builder.crate_doc_out(target);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));

@@ -633,14 +633,14 @@ impl Step for WhitelistedRustc {
compiler
};

// Build libstd docs so that we generate relative links
// Build libstd docs so that we generate relative links.
builder.ensure(Std { stage, target });

builder.ensure(compile::Rustc { compiler, target });
let out_dir = builder.stage_out(compiler, Mode::Rustc)
.join(target).join("doc");

// See docs in std above for why we symlink
// See docs in std above for why we symlink.
let my_out = builder.crate_doc_out(target);
t!(symlink_dir_force(&builder.config, &my_out, &out_dir));

@@ -737,7 +737,7 @@ impl Step for Rustc {
for krate in &compiler_crates {
// Create all crate output directories first to make sure rustdoc uses
// relative links.
// FIXME: Cargo should probably do this itself.
// FIXME: cargo should probably do this itself.
t!(fs::create_dir_all(out_dir.join(krate)));
cargo.arg("-p").arg(krate);
}
@@ -879,7 +879,7 @@ impl Step for ErrorIndex {
index.arg("html");
index.arg(out.join("error-index.html"));

// FIXME: shouldn't have to pass this env var
// FIXME: shouldn't have to pass this env var.
index.env("CFG_BUILD", &builder.config.build)
.env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir());

@@ -936,7 +936,7 @@ fn symlink_dir_force(config: &Config, src: &Path, dst: &Path) -> io::Result<()>
if m.file_type().is_dir() {
fs::remove_dir_all(dst)?;
} else {
// handle directory junctions on windows by falling back to
// Handle directory junctions on Windows by falling back to
// `remove_dir`.
fs::remove_file(dst).or_else(|_| {
fs::remove_dir(dst)
6 changes: 3 additions & 3 deletions src/bootstrap/flags.rs
@@ -33,7 +33,7 @@ pub struct Flags {
pub rustc_error_format: Option<String>,
pub dry_run: bool,

// true => deny
// `true` => deny
pub warnings: Option<bool>,
}

@@ -139,8 +139,8 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`

// We can't use getopt to parse the options until we have completed specifying which
// options are valid, but under the current implementation, some options are conditional on
// the subcommand. Therefore we must manually identify the subcommand first, so that we can
// complete the definition of the options. Then we can use the getopt::Matches object from
// the subcommand. Therefore, we must manually identify the subcommand first, so that we can
// complete the definition of the options. Then we can use the `getopt::Matches` object from
// there on out.
let subcommand = args.iter().find(|&s| {
(s == "build")
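The two-pass approach described in the comment above (identify the subcommand first, then finish defining the valid options) can be sketched with the standard library alone; the subcommand list below is abridged and the per-subcommand options are illustrative, not the real `getopts` definitions:

use std::env;

fn main() {
    let args: Vec<String> = env::args().skip(1).collect();

    // Pass 1: find the subcommand before any option parsing happens.
    let subcommand = args.iter().find(|s| {
        ["build", "check", "doc", "test", "bench", "dist", "install", "clean"]
            .contains(&s.as_str())
    });

    // Pass 2: only now do we know the full set of valid options for this run.
    let extra_options: &[&str] = match subcommand.map(|s| s.as_str()) {
        Some("test") | Some("bench") => &["--no-fail-fast", "--test-args"],
        Some("clean") => &["--all"],
        _ => &[],
    };

    println!("subcommand: {:?}", subcommand);
    println!("subcommand-specific options: {:?}", extra_options);
}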