diff --git a/benches/benchsuite/benches/global_cache_tracker.rs b/benches/benchsuite/benches/global_cache_tracker.rs index 71d5d526226..f879b07eb41 100644 --- a/benches/benchsuite/benches/global_cache_tracker.rs +++ b/benches/benchsuite/benches/global_cache_tracker.rs @@ -3,7 +3,7 @@ use cargo::core::global_cache_tracker::{self, DeferredGlobalLastUse, GlobalCacheTracker}; use cargo::util::cache_lock::CacheLockMode; use cargo::util::interning::InternedString; -use cargo::util::Config; +use cargo::util::GlobalContext; use criterion::{criterion_group, criterion_main, Criterion}; use std::fs; use std::path::{Path, PathBuf}; @@ -25,7 +25,7 @@ fn cargo_home() -> PathBuf { p } -fn initialize_config() -> Config { +fn initialize_context() -> GlobalContext { // Set up config. let shell = cargo::core::Shell::new(); let homedir = cargo_home(); @@ -33,42 +33,41 @@ fn initialize_config() -> Config { fs::create_dir_all(&homedir).unwrap(); } let cwd = homedir.clone(); - let mut config = Config::new(shell, cwd, homedir); - config.nightly_features_allowed = true; - config.set_search_stop_path(root()); - config - .configure( - 0, - false, - None, - false, - false, - false, - &None, - &["gc".to_string()], - &[], - ) - .unwrap(); + let mut gctx = GlobalContext::new(shell, cwd, homedir); + gctx.nightly_features_allowed = true; + gctx.set_search_stop_path(root()); + gctx.configure( + 0, + false, + None, + false, + false, + false, + &None, + &["gc".to_string()], + &[], + ) + .unwrap(); // Set up database sample. - let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked(); + let db_path = GlobalCacheTracker::db_path(&gctx).into_path_unlocked(); if db_path.exists() { fs::remove_file(&db_path).unwrap(); } let sample = Path::new(env!("CARGO_MANIFEST_DIR")).join(GLOBAL_CACHE_SAMPLE); fs::copy(sample, &db_path).unwrap(); - config + gctx } /// Benchmarks how long it takes to initialize `GlobalCacheTracker` with an already /// existing full database. 
fn global_tracker_init(c: &mut Criterion) { - let config = initialize_config(); - let _lock = config + let gctx = initialize_context(); + let _lock = gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive) .unwrap(); c.bench_function("global_tracker_init", |b| { b.iter(|| { - GlobalCacheTracker::new(&config).unwrap(); + GlobalCacheTracker::new(&gctx).unwrap(); }) }); } @@ -76,12 +75,12 @@ fn global_tracker_init(c: &mut Criterion) { /// Benchmarks how long it takes to save a `GlobalCacheTracker` when there are zero /// updates. fn global_tracker_empty_save(c: &mut Criterion) { - let config = initialize_config(); - let _lock = config + let gctx = initialize_context(); + let _lock = gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive) .unwrap(); let mut deferred = DeferredGlobalLastUse::new(); - let mut tracker = GlobalCacheTracker::new(&config).unwrap(); + let mut tracker = GlobalCacheTracker::new(&gctx).unwrap(); c.bench_function("global_tracker_empty_save", |b| { b.iter(|| { @@ -112,12 +111,12 @@ fn load_random_sample() -> Vec<(InternedString, InternedString, u64)> { /// This runs for different sizes of number of crates to update (selecting /// from the random sample stored on disk). 
fn global_tracker_update(c: &mut Criterion) { - let config = initialize_config(); - let _lock = config + let gctx = initialize_context(); + let _lock = gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive) .unwrap(); let sample = Path::new(env!("CARGO_MANIFEST_DIR")).join(GLOBAL_CACHE_SAMPLE); - let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked(); + let db_path = GlobalCacheTracker::db_path(&gctx).into_path_unlocked(); let random_sample = load_random_sample(); @@ -129,7 +128,7 @@ fn global_tracker_update(c: &mut Criterion) { fs::copy(&sample, &db_path).unwrap(); let mut deferred = DeferredGlobalLastUse::new(); - let mut tracker = GlobalCacheTracker::new(&config).unwrap(); + let mut tracker = GlobalCacheTracker::new(&gctx).unwrap(); group.bench_with_input(size.to_string(), &size, |b, &size| { b.iter(|| { for (encoded_registry_name, name, size) in &random_sample[..size] { diff --git a/benches/benchsuite/benches/resolve.rs b/benches/benchsuite/benches/resolve.rs index e235441e1e2..89d0212e378 100644 --- a/benches/benchsuite/benches/resolve.rs +++ b/benches/benchsuite/benches/resolve.rs @@ -4,27 +4,27 @@ use cargo::core::resolver::features::{FeatureOpts, FeatureResolver}; use cargo::core::resolver::{CliFeatures, ForceAllTargets, HasDevUnits, ResolveBehavior}; use cargo::core::{PackageIdSpec, Workspace}; use cargo::ops::WorkspaceResolve; -use cargo::Config; +use cargo::GlobalContext; use criterion::{criterion_group, criterion_main, Criterion}; use std::path::Path; -struct ResolveInfo<'cfg> { - ws: Workspace<'cfg>, +struct ResolveInfo<'gctx> { + ws: Workspace<'gctx>, requested_kinds: [CompileKind; 1], - target_data: RustcTargetData<'cfg>, + target_data: RustcTargetData<'gctx>, cli_features: CliFeatures, specs: Vec, has_dev_units: HasDevUnits, force_all_targets: ForceAllTargets, - ws_resolve: WorkspaceResolve<'cfg>, + ws_resolve: WorkspaceResolve<'gctx>, } /// Helper for resolving a workspace. 
This will run the resolver once to /// download everything, and returns all the data structures that are used /// during resolution. -fn do_resolve<'cfg>(config: &'cfg Config, ws_root: &Path) -> ResolveInfo<'cfg> { +fn do_resolve<'gctx>(gctx: &'gctx GlobalContext, ws_root: &Path) -> ResolveInfo<'gctx> { let requested_kinds = [CompileKind::Host]; - let ws = Workspace::new(&ws_root.join("Cargo.toml"), config).unwrap(); + let ws = Workspace::new(&ws_root.join("Cargo.toml"), gctx).unwrap(); let mut target_data = RustcTargetData::new(&ws, &requested_kinds).unwrap(); let cli_features = CliFeatures::from_command_line(&[], false, true).unwrap(); let pkgs = cargo::ops::Packages::Default; @@ -62,7 +62,7 @@ fn resolve_ws(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("resolve_ws"); for (ws_name, ws_root) in fixtures.workspaces() { - let config = fixtures.make_config(&ws_root); + let gctx = fixtures.make_context(&ws_root); // The resolver info is initialized only once in a lazy fashion. This // allows criterion to skip this workspace if the user passes a filter // on the command-line (like `cargo bench -- resolve_ws/tikv`). @@ -81,7 +81,7 @@ fn resolve_ws(c: &mut Criterion) { has_dev_units, force_all_targets, .. - } = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root)); + } = lazy_info.get_or_insert_with(|| do_resolve(&gctx, &ws_root)); b.iter(|| { cargo::ops::resolve_ws_with_opts( ws, @@ -104,7 +104,7 @@ fn feature_resolver(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("feature_resolver"); for (ws_name, ws_root) in fixtures.workspaces() { - let config = fixtures.make_config(&ws_root); + let gctx = fixtures.make_context(&ws_root); let mut lazy_info = None; group.bench_function(&ws_name, |b| { let ResolveInfo { @@ -116,7 +116,7 @@ fn feature_resolver(c: &mut Criterion) { has_dev_units, ws_resolve, .. 
- } = lazy_info.get_or_insert_with(|| do_resolve(&config, &ws_root)); + } = lazy_info.get_or_insert_with(|| do_resolve(&gctx, &ws_root)); b.iter(|| { let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, *has_dev_units); FeatureResolver::resolve( diff --git a/benches/benchsuite/benches/workspace_initialization.rs b/benches/benchsuite/benches/workspace_initialization.rs index af68efe76ce..b94539f576e 100644 --- a/benches/benchsuite/benches/workspace_initialization.rs +++ b/benches/benchsuite/benches/workspace_initialization.rs @@ -6,12 +6,12 @@ fn workspace_initialization(c: &mut Criterion) { let fixtures = fixtures!(); let mut group = c.benchmark_group("workspace_initialization"); for (ws_name, ws_root) in fixtures.workspaces() { - let config = fixtures.make_config(&ws_root); + let gctx = fixtures.make_context(&ws_root); // The resolver info is initialized only once in a lazy fashion. This // allows criterion to skip this workspace if the user passes a filter // on the command-line (like `cargo bench -- workspace_initialization/tikv`). 
group.bench_function(ws_name, |b| { - b.iter(|| Workspace::new(&ws_root.join("Cargo.toml"), &config).unwrap()) + b.iter(|| Workspace::new(&ws_root.join("Cargo.toml"), &gctx).unwrap()) }); } group.finish(); diff --git a/benches/benchsuite/src/bin/capture-last-use.rs b/benches/benchsuite/src/bin/capture-last-use.rs index 3034d49ac97..dc226109baa 100644 --- a/benches/benchsuite/src/bin/capture-last-use.rs +++ b/benches/benchsuite/src/bin/capture-last-use.rs @@ -15,7 +15,7 @@ use cargo::core::global_cache_tracker::{self, DeferredGlobalLastUse, GlobalCacheTracker}; use cargo::util::cache_lock::CacheLockMode; use cargo::util::interning::InternedString; -use cargo::Config; +use cargo::GlobalContext; use rand::prelude::SliceRandom; use std::collections::HashMap; use std::fs; @@ -28,30 +28,29 @@ fn main() { let shell = cargo::core::Shell::new(); let homedir = Path::new(env!("CARGO_MANIFEST_DIR")).join("global-cache-tracker"); let cwd = homedir.clone(); - let mut config = Config::new(shell, cwd, homedir.clone()); - config - .configure( - 0, - false, - None, - false, - false, - false, - &None, - &["gc".to_string()], - &[], - ) - .unwrap(); - let db_path = GlobalCacheTracker::db_path(&config).into_path_unlocked(); + let mut gctx = GlobalContext::new(shell, cwd, homedir.clone()); + gctx.configure( + 0, + false, + None, + false, + false, + false, + &None, + &["gc".to_string()], + &[], + ) + .unwrap(); + let db_path = GlobalCacheTracker::db_path(&gctx).into_path_unlocked(); if db_path.exists() { fs::remove_file(&db_path).unwrap(); } - let _lock = config + let _lock = gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive) .unwrap(); let mut deferred = DeferredGlobalLastUse::new(); - let mut tracker = GlobalCacheTracker::new(&config).unwrap(); + let mut tracker = GlobalCacheTracker::new(&gctx).unwrap(); let real_home = cargo::util::homedir(&std::env::current_dir().unwrap()).unwrap(); diff --git a/benches/benchsuite/src/lib.rs b/benches/benchsuite/src/lib.rs index 
f2771084191..327c04c4f05 100644 --- a/benches/benchsuite/src/lib.rs +++ b/benches/benchsuite/src/lib.rs @@ -1,6 +1,6 @@ #![allow(clippy::disallowed_methods)] -use cargo::Config; +use cargo::GlobalContext; use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; @@ -175,25 +175,24 @@ impl Fixtures { .collect() } - /// Creates a new Config. - pub fn make_config(&self, ws_root: &Path) -> Config { + /// Creates a new `GlobalContext`. + pub fn make_context(&self, ws_root: &Path) -> GlobalContext { let shell = cargo::core::Shell::new(); - let mut config = Config::new(shell, ws_root.to_path_buf(), self.cargo_home()); + let mut gctx = GlobalContext::new(shell, ws_root.to_path_buf(), self.cargo_home()); // Configure is needed to set the target_dir which is needed to write // the .rustc_info.json file which is very expensive. - config - .configure( - 0, - false, - None, - false, - false, - false, - &Some(self.target_dir()), - &[], - &[], - ) - .unwrap(); - config + gctx.configure( + 0, + false, + None, + false, + false, + false, + &Some(self.target_dir()), + &[], + &[], + ) + .unwrap(); + gctx } } diff --git a/crates/mdman/src/hbs.rs b/crates/mdman/src/hbs.rs index 33cfc2b5184..dee0a2fff39 100644 --- a/crates/mdman/src/hbs.rs +++ b/crates/mdman/src/hbs.rs @@ -99,7 +99,7 @@ impl HelperDef for OptionHelper<'_> { &self, h: &Helper<'rc>, r: &'reg Handlebars<'reg>, - ctx: &'rc Context, + gctx: &'rc Context, rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { @@ -137,10 +137,10 @@ impl HelperDef for OptionHelper<'_> { } }; // Render the block. - let block = t.renders(r, ctx, rc)?; + let block = t.renders(r, gctx, rc)?; // Get the name of this page. 
- let man_name = ctx + let man_name = gctx .data() .get("man_name") .expect("expected man_name in context") @@ -167,7 +167,7 @@ impl HelperDef for ManLinkHelper<'_> { &self, h: &Helper<'rc>, _r: &'reg Handlebars<'reg>, - _ctx: &'rc Context, + _gctx: &'rc Context, _rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { @@ -200,7 +200,7 @@ impl HelperDef for ManLinkHelper<'_> { fn set_decorator( d: &Decorator<'_>, _: &Handlebars<'_>, - _ctx: &Context, + _gctx: &Context, rc: &mut RenderContext<'_, '_>, ) -> Result<(), RenderError> { let data_to_set = d.hash(); @@ -212,13 +212,13 @@ fn set_decorator( /// Sets a variable to a value within the context. fn set_in_context(rc: &mut RenderContext<'_, '_>, key: &str, value: serde_json::Value) { - let mut ctx = match rc.context() { + let mut gctx = match rc.context() { Some(c) => (*c).clone(), None => Context::wraps(serde_json::Value::Object(serde_json::Map::new())).unwrap(), }; - if let serde_json::Value::Object(m) = ctx.data_mut() { + if let serde_json::Value::Object(m) = gctx.data_mut() { m.insert(key.to_string(), value); - rc.set_context(ctx); + rc.set_context(gctx); } else { panic!("expected object in context"); } @@ -226,11 +226,11 @@ fn set_in_context(rc: &mut RenderContext<'_, '_>, key: &str, value: serde_json:: /// Removes a variable from the context. 
fn remove_from_context(rc: &mut RenderContext<'_, '_>, key: &str) { - let ctx = rc.context().expect("cannot remove from null context"); - let mut ctx = (*ctx).clone(); - if let serde_json::Value::Object(m) = ctx.data_mut() { + let gctx = rc.context().expect("cannot remove from null context"); + let mut gctx = (*gctx).clone(); + if let serde_json::Value::Object(m) = gctx.data_mut() { m.remove(key); - rc.set_context(ctx); + rc.set_context(gctx); } else { panic!("expected object in context"); } diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index 20cb5c03dcc..39bd8880436 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -18,7 +18,7 @@ use cargo::core::{Dependency, PackageId, Registry, Summary}; use cargo::core::{GitReference, SourceId}; use cargo::sources::source::QueryKind; use cargo::sources::IndexSummary; -use cargo::util::{CargoResult, Config, IntoUrl}; +use cargo::util::{CargoResult, GlobalContext, IntoUrl}; use cargo_util_schemas::manifest::RustVersion; use proptest::collection::{btree_map, vec}; @@ -28,7 +28,7 @@ use proptest::string::string_regex; use varisat::ExtendFormula; pub fn resolve(deps: Vec, registry: &[Summary]) -> CargoResult> { - resolve_with_config(deps, registry, &Config::default().unwrap()) + resolve_with_global_context(deps, registry, &GlobalContext::default().unwrap()) } pub fn resolve_and_validated( @@ -36,7 +36,8 @@ pub fn resolve_and_validated( registry: &[Summary], sat_resolve: Option, ) -> CargoResult> { - let resolve = resolve_with_config_raw(deps.clone(), registry, &Config::default().unwrap()); + let resolve = + resolve_with_global_context_raw(deps.clone(), registry, &GlobalContext::default().unwrap()); match resolve { Err(e) => { @@ -83,19 +84,19 @@ pub fn resolve_and_validated( } } -pub fn resolve_with_config( +pub fn resolve_with_global_context( deps: Vec, registry: &[Summary], - config: &Config, + gctx: &GlobalContext, ) -> CargoResult> { - let resolve = 
resolve_with_config_raw(deps, registry, config)?; + let resolve = resolve_with_global_context_raw(deps, registry, gctx)?; Ok(resolve.sort()) } -pub fn resolve_with_config_raw( +pub fn resolve_with_global_context_raw( deps: Vec, registry: &[Summary], - config: &Config, + gctx: &GlobalContext, ) -> CargoResult { struct MyRegistry<'a> { list: &'a [Summary], @@ -166,7 +167,7 @@ pub fn resolve_with_config_raw( let opts = ResolveOpts::everything(); let start = Instant::now(); let mut version_prefs = VersionPreferences::default(); - if config.cli_unstable().minimal_versions { + if gctx.cli_unstable().minimal_versions { version_prefs.version_ordering(VersionOrdering::MinimumVersionsFirst) } let resolve = resolver::resolve( @@ -175,7 +176,7 @@ pub fn resolve_with_config_raw( &mut registry, &version_prefs, ResolveVersion::with_rust_version(None), - Some(config), + Some(gctx), ); // The largest test in our suite takes less then 30 sec. @@ -245,6 +246,7 @@ fn sat_at_most_one_by_key( /// so the selected packages may not match the real resolver. 
#[derive(Clone)] pub struct SatResolve(Rc>); + struct SatResolveInner { solver: varisat::Solver<'static>, var_for_is_packages_used: HashMap, @@ -518,9 +520,11 @@ pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { pub fn dep(name: &str) -> Dependency { dep_req(name, "*") } + pub fn dep_req(name: &str, req: &str) -> Dependency { Dependency::parse(name, Some(req), registry_loc()).unwrap() } + pub fn dep_req_kind(name: &str, req: &str, kind: DepKind) -> Dependency { let mut dep = dep_req(name, req); dep.set_kind(kind); @@ -533,6 +537,7 @@ pub fn dep_loc(name: &str, location: &str) -> Dependency { let source_id = SourceId::for_git(&url, master).unwrap(); Dependency::parse(name, Some("1.0.0"), source_id).unwrap() } + pub fn dep_kind(name: &str, kind: DepKind) -> Dependency { dep(name).set_kind(kind).clone() } diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs index 662bad90f47..2728660b2bf 100644 --- a/crates/resolver-tests/tests/resolve.rs +++ b/crates/resolver-tests/tests/resolve.rs @@ -2,13 +2,13 @@ use std::io::IsTerminal; use cargo::core::dependency::DepKind; use cargo::core::Dependency; -use cargo::util::Config; +use cargo::util::GlobalContext; use cargo_util::is_ci; use resolver_tests::{ assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated, - resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId, + resolve_with_global_context, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId, }; use proptest::prelude::*; @@ -58,9 +58,9 @@ proptest! { fn prop_minimum_version_errors_the_same( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) ) { - let mut config = Config::default().unwrap(); - config.nightly_features_allowed = true; - config + let mut gctx = GlobalContext::default().unwrap(); + gctx.nightly_features_allowed = true; + gctx .configure( 1, false, @@ -86,10 +86,10 @@ proptest! 
{ ®, ); - let mres = resolve_with_config( + let mres = resolve_with_global_context( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, - &config, + &gctx, ); prop_assert_eq!( @@ -107,9 +107,9 @@ proptest! { fn prop_direct_minimum_version_error_implications( PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) ) { - let mut config = Config::default().unwrap(); - config.nightly_features_allowed = true; - config + let mut gctx = GlobalContext::default().unwrap(); + gctx.nightly_features_allowed = true; + gctx .configure( 1, false, @@ -135,10 +135,10 @@ proptest! { ®, ); - let mres = resolve_with_config( + let mres = resolve_with_global_context( vec![dep_req(&this.name(), &format!("={}", this.version()))], ®, - &config, + &gctx, ); if res.is_err() { @@ -435,31 +435,30 @@ fn test_resolving_minimum_version_with_transitive_deps() { pkg!("bar" => [dep_req("util", ">=1.0.1")]), ]); - let mut config = Config::default().unwrap(); + let mut gctx = GlobalContext::default().unwrap(); // -Z minimal-versions // When the minimal-versions config option is specified then the lowest // possible version of a package should be selected. "util 1.0.0" can't be // selected because of the requirements of "bar", so the minimum version // must be 1.1.1. 
- config.nightly_features_allowed = true; - config - .configure( - 1, - false, - None, - false, - false, - false, - &None, - &["minimal-versions".to_string()], - &[], - ) - .unwrap(); - - let res = resolve_with_config( + gctx.nightly_features_allowed = true; + gctx.configure( + 1, + false, + None, + false, + false, + false, + &None, + &["minimal-versions".to_string()], + &[], + ) + .unwrap(); + + let res = resolve_with_global_context( vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], ®, - &config, + &gctx, ) .unwrap(); diff --git a/crates/xtask-bump-check/src/main.rs b/crates/xtask-bump-check/src/main.rs index 11242696fa2..55e8b746a08 100644 --- a/crates/xtask-bump-check/src/main.rs +++ b/crates/xtask-bump-check/src/main.rs @@ -6,12 +6,12 @@ fn main() { let cli = xtask::cli(); let matches = cli.get_matches(); - let mut config = cargo::util::config::Config::default().unwrap_or_else(|e| { + let mut gctx = cargo::util::config::GlobalContext::default().unwrap_or_else(|e| { let mut eval = cargo::core::shell::Shell::new(); cargo::exit_with_error(e.into(), &mut eval) }); - if let Err(e) = xtask::exec(&matches, &mut config) { - cargo::exit_with_error(e, &mut config.shell()) + if let Err(e) = xtask::exec(&matches, &mut gctx) { + cargo::exit_with_error(e, &mut gctx.shell()) } } diff --git a/crates/xtask-bump-check/src/xtask.rs b/crates/xtask-bump-check/src/xtask.rs index 6d933c9a99d..2ae97b2f7df 100644 --- a/crates/xtask-bump-check/src/xtask.rs +++ b/crates/xtask-bump-check/src/xtask.rs @@ -67,15 +67,15 @@ pub fn cli() -> clap::Command { ) } -pub fn exec(args: &clap::ArgMatches, config: &mut cargo::util::Config) -> cargo::CliResult { - config_configure(config, args)?; +pub fn exec(args: &clap::ArgMatches, gctx: &mut cargo::util::GlobalContext) -> cargo::CliResult { + global_context_configure(gctx, args)?; - bump_check(args, config)?; + bump_check(args, gctx)?; Ok(()) } -fn config_configure(config: &mut Config, args: &ArgMatches) -> CliResult { +fn 
global_context_configure(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let verbose = args.verbose(); // quiet is unusual because it is redefined in some subcommands in order // to provide custom help text. @@ -92,7 +92,7 @@ fn config_configure(config: &mut Config, args: &ArgMatches) -> CliResult { if let Some(values) = args.get_many::("config") { config_args.extend(values.cloned()); } - config.configure( + gctx.configure( verbose, quiet, color, @@ -109,14 +109,14 @@ fn config_configure(config: &mut Config, args: &ArgMatches) -> CliResult { /// Main entry of `xtask-bump-check`. /// /// Assumption: version number are incremental. We never have point release for old versions. -fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoResult<()> { - let ws = args.workspace(config)?; +fn bump_check(args: &clap::ArgMatches, gctx: &cargo::util::GlobalContext) -> CargoResult<()> { + let ws = args.workspace(gctx)?; let repo = git2::Repository::open(ws.root())?; - let base_commit = get_base_commit(config, args, &repo)?; + let base_commit = get_base_commit(gctx, args, &repo)?; let head_commit = get_head_commit(args, &repo)?; let referenced_commit = get_referenced_commit(&repo, &base_commit)?; let changed_members = changed(&ws, &repo, &base_commit, &head_commit)?; - let status = |msg: &str| config.shell().status(STATUS, msg); + let status = |msg: &str| gctx.shell().status(STATUS, msg); // Don't check against beta and stable branches, // as the publish of these crates are not tied with Rust release process. 
@@ -128,7 +128,7 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes let mut needs_bump = Vec::new(); - check_crates_io(config, &changed_members, &mut needs_bump)?; + check_crates_io(gctx, &changed_members, &mut needs_bump)?; if let Some(referenced_commit) = referenced_commit.as_ref() { status(&format!("compare against `{}`", referenced_commit.id()))?; @@ -169,7 +169,7 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes cmd.arg("semver-checks") .arg("check-release") .arg("--workspace"); - config.shell().status("Running", &cmd)?; + gctx.shell().status("Running", &cmd)?; cmd.exec()?; if let Some(referenced_commit) = referenced_commit.as_ref() { @@ -181,7 +181,7 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes for krate in crates_not_check_against_channels { cmd.args(&["--exclude", krate]); } - config.shell().status("Running", &cmd)?; + gctx.shell().status("Running", &cmd)?; cmd.exec()?; } @@ -192,7 +192,7 @@ fn bump_check(args: &clap::ArgMatches, config: &cargo::util::Config) -> CargoRes /// Returns the commit of upstream `master` branch if `base-rev` is missing. fn get_base_commit<'a>( - config: &Config, + gctx: &GlobalContext, args: &clap::ArgMatches, repo: &'a git2::Repository, ) -> CargoResult> { @@ -222,7 +222,7 @@ fn get_base_commit<'a>( let upstream_ref = upstream_branches[0].get(); if upstream_branches.len() > 1 { let name = upstream_ref.name().expect("name is valid UTF-8"); - let _ = config.shell().warn(format!( + let _ = gctx.shell().warn(format!( "multiple `{UPSTREAM_BRANCH}` found, picking {name}" )); } @@ -358,15 +358,15 @@ fn changed<'r, 'ws>( /// /// Assumption: We always release a version larger than all existing versions. 
fn check_crates_io<'a>( - config: &Config, + gctx: &GlobalContext, changed_members: &HashMap<&'a str, &'a Package>, needs_bump: &mut Vec<&'a Package>, ) -> CargoResult<()> { - let source_id = SourceId::crates_io(config)?; - let mut registry = PackageRegistry::new(config)?; - let _lock = config.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; + let source_id = SourceId::crates_io(gctx)?; + let mut registry = PackageRegistry::new(gctx)?; + let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; registry.lock_patches(); - config.shell().status( + gctx.shell().status( STATUS, format_args!("compare against `{}`", source_id.display_registry_name()), )?; @@ -402,11 +402,11 @@ fn check_crates_io<'a>( } /// Checkouts a temporary workspace to do further version comparisons. -fn checkout_ws<'cfg, 'a>( - ws: &Workspace<'cfg>, +fn checkout_ws<'gctx, 'a>( + ws: &Workspace<'gctx>, repo: &'a git2::Repository, referenced_commit: &git2::Commit<'a>, -) -> CargoResult> { +) -> CargoResult> { let repo_path = repo.path().as_os_str().to_str().unwrap(); // Put it under `target/cargo-` let short_id = &referenced_commit.id().to_string()[..7]; @@ -418,7 +418,7 @@ fn checkout_ws<'cfg, 'a>( .clone(repo_path, checkout_path)?; let obj = new_repo.find_object(referenced_commit.id(), None)?; new_repo.reset(&obj, git2::ResetType::Hard, None)?; - Workspace::new(&checkout_path.join("Cargo.toml"), ws.config()) + Workspace::new(&checkout_path.join("Cargo.toml"), ws.gctx()) } #[test] diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index 89223bfb80f..f80fa87d250 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Context as _}; use cargo::core::shell::Shell; use cargo::core::{features, CliUnstable}; -use cargo::{drop_print, drop_println, CargoResult}; +use cargo::{drop_print, drop_println, CargoResult, GlobalContext}; use clap::builder::UnknownArgumentValueParser; use itertools::Itertools; use 
std::collections::HashMap; @@ -15,12 +15,12 @@ use crate::command_prelude::*; use crate::util::is_rustup; use cargo::util::style; -pub fn main(config: &mut LazyConfig) -> CliResult { +pub fn main(lazy_gctx: &mut LazyContext) -> CliResult { let args = cli().try_get_matches()?; // Update the process-level notion of cwd // This must be completed before config is initialized - assert_eq!(config.is_init(), false); + assert_eq!(lazy_gctx.is_init(), false); if let Some(new_cwd) = args.get_one::("directory") { // This is a temporary hack. This cannot access `Config`, so this is a bit messy. // This does not properly parse `-Z` flags that appear after the subcommand. @@ -45,9 +45,9 @@ pub fn main(config: &mut LazyConfig) -> CliResult { // CAUTION: Be careful with using `config` until it is configured below. // In general, try to avoid loading config values unless necessary (like // the [alias] table). - let config = config.get_mut(); + let gctx = lazy_gctx.get_mut(); - let (expanded_args, global_args) = expand_aliases(config, args, vec![])?; + let (expanded_args, global_args) = expand_aliases(gctx, args, vec![])?; if expanded_args .get_one::("unstable-features") @@ -75,7 +75,7 @@ pub fn main(config: &mut LazyConfig) -> CliResult { }) .join("\n"); drop_println!( - config, + gctx, "\ {header}Available unstable (nightly-only) flags:{header:#} @@ -83,9 +83,9 @@ pub fn main(config: &mut LazyConfig) -> CliResult { Run with `{literal}cargo -Z{literal:#} {placeholder}[FLAG] [COMMAND]{placeholder:#}`", ); - if !config.nightly_features_allowed { + if !gctx.nightly_features_allowed { drop_println!( - config, + gctx, "\nUnstable flags are only available on the nightly channel \ of Cargo, but this is the `{}` channel.\n\ {}", @@ -94,7 +94,7 @@ Run with `{literal}cargo -Z{literal:#} {placeholder}[FLAG] [COMMAND]{placeholder ); } drop_println!( - config, + gctx, "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about these flags." 
); @@ -104,12 +104,12 @@ Run with `{literal}cargo -Z{literal:#} {placeholder}[FLAG] [COMMAND]{placeholder let is_verbose = expanded_args.verbose() > 0; if expanded_args.flag("version") { let version = get_version_string(is_verbose); - drop_print!(config, "{}", version); + drop_print!(gctx, "{}", version); return Ok(()); } if let Some(code) = expanded_args.get_one::("explain") { - let mut procss = config.load_global_rustc(None)?.process(); + let mut procss = gctx.load_global_rustc(None)?.process(); procss.arg("--explain").arg(code).exec()?; return Ok(()); } @@ -130,10 +130,10 @@ Run with `{literal}cargo -Z{literal:#} {placeholder}[FLAG] [COMMAND]{placeholder ), ]); drop_println!( - config, + gctx, color_print::cstr!("Installed Commands:") ); - for (name, command) in list_commands(config) { + for (name, command) in list_commands(gctx) { let known_external_desc = known_external_command_descriptions.get(name.as_str()); let literal = style::LITERAL; match command { @@ -144,24 +144,24 @@ Run with `{literal}cargo -Z{literal:#} {placeholder}[FLAG] [COMMAND]{placeholder ); let summary = about.unwrap_or_default(); let summary = summary.lines().next().unwrap_or(&summary); // display only the first line - drop_println!(config, " {literal}{name:<20}{literal:#} {summary}"); + drop_println!(gctx, " {literal}{name:<20}{literal:#} {summary}"); } CommandInfo::External { path } => { if let Some(desc) = known_external_desc { - drop_println!(config, " {literal}{name:<20}{literal:#} {desc}"); + drop_println!(gctx, " {literal}{name:<20}{literal:#} {desc}"); } else if is_verbose { drop_println!( - config, + gctx, " {literal}{name:<20}{literal:#} {}", path.display() ); } else { - drop_println!(config, " {literal}{name}{literal:#}"); + drop_println!(gctx, " {literal}{name}{literal:#}"); } } CommandInfo::Alias { target } => { drop_println!( - config, + gctx, " {literal}{name:<20}{literal:#} alias: {}", target.iter().join(" ") ); @@ -180,10 +180,10 @@ Run with `{literal}cargo -Z{literal:#} 
{placeholder}[FLAG] [COMMAND]{placeholder } }; let exec = Exec::infer(cmd)?; - config_configure(config, &expanded_args, subcommand_args, global_args, &exec)?; - super::init_git(config); + config_configure(gctx, &expanded_args, subcommand_args, global_args, &exec)?; + super::init_git(gctx); - exec.exec(config, subcommand_args) + exec.exec(gctx, subcommand_args) } pub fn get_version_string(is_verbose: bool) -> String { @@ -259,18 +259,18 @@ fn add_ssl(version_string: &mut String) { /// clap code for extracting a subcommand discards global options /// (appearing before the subcommand). fn expand_aliases( - config: &mut Config, + gctx: &mut GlobalContext, args: ArgMatches, mut already_expanded: Vec, ) -> Result<(ArgMatches, GlobalArgs), CliError> { if let Some((cmd, sub_args)) = args.subcommand() { let exec = commands::builtin_exec(cmd); - let aliased_cmd = super::aliased_command(config, cmd); + let aliased_cmd = super::aliased_command(gctx, cmd); match (exec, aliased_cmd) { (Some(_), Ok(Some(_))) => { // User alias conflicts with a built-in subcommand - config.shell().warn(format!( + gctx.shell().warn(format!( "user-defined alias `{}` is ignored, because it is shadowed by a built-in command", cmd, ))?; @@ -299,8 +299,8 @@ To pass the arguments to the subcommand, remove `--`", // Currently this is only a warning, but after a transition period this will become // a hard error. if super::builtin_aliases_execs(cmd).is_none() { - if let Some(path) = super::find_external_subcommand(config, cmd) { - config.shell().warn(format!( + if let Some(path) = super::find_external_subcommand(gctx, cmd) { + gctx.shell().warn(format!( "\ user-defined alias `{}` is shadowing an external subcommand found at: `{}` This was previously accepted but is being phased out; it will become a hard error in a future release. 
@@ -311,10 +311,10 @@ For more information, see issue #10049 return Err(e.into()), @@ -364,13 +364,13 @@ For more information, see issue #12207 CliResult { - let arg_target_dir = &subcommand_args.value_of_path("target-dir", config); + let arg_target_dir = &subcommand_args.value_of_path("target-dir", gctx); let mut verbose = global_args.verbose + args.verbose(); // quiet is unusual because it is redefined in some subcommands in order // to provide custom help text. @@ -383,7 +383,7 @@ fn config_configure( // subject to change. if let Some(lower) = verbose.checked_sub(1) { verbose = lower; - } else if !config.shell().is_err_tty() { + } else if !gctx.shell().is_err_tty() { // Don't pollute potentially-scripted output quiet = true; } @@ -404,7 +404,7 @@ fn config_configure( if let Some(values) = args.get_many::("config") { config_args.extend(values.cloned()); } - config.configure( + gctx.configure( verbose, quiet, color, @@ -443,26 +443,26 @@ impl Exec { } } - fn exec(self, config: &mut Config, subcommand_args: &ArgMatches) -> CliResult { + fn exec(self, gctx: &mut GlobalContext, subcommand_args: &ArgMatches) -> CliResult { match self { - Self::Builtin(exec) => exec(config, subcommand_args), + Self::Builtin(exec) => exec(gctx, subcommand_args), Self::Manifest(cmd) => { - let ext_path = super::find_external_subcommand(config, &cmd); - if !config.cli_unstable().script && ext_path.is_some() { - config.shell().warn(format_args!( + let ext_path = super::find_external_subcommand(gctx, &cmd); + if !gctx.cli_unstable().script && ext_path.is_some() { + gctx.shell().warn(format_args!( "\ external subcommand `{cmd}` has the appearance of a manifest-command This was previously accepted but will be phased out when `-Zscript` is stabilized. 
For more information, see issue #12207 .", ))?; - Self::External(cmd).exec(config, subcommand_args) + Self::External(cmd).exec(gctx, subcommand_args) } else { let ext_args: Vec = subcommand_args .get_many::("") .unwrap_or_default() .cloned() .collect(); - commands::run::exec_manifest_command(config, &cmd, &ext_args) + commands::run::exec_manifest_command(gctx, &cmd, &ext_args) } } Self::External(cmd) => { @@ -473,7 +473,7 @@ For more information, see issue #12207 cargo help <>' for more information on a sp .subcommands(commands::builtin()) } -/// Delay loading [`Config`] until access. +/// Delay loading [`GlobalContext`] until access. /// -/// In the common path, the [`Config`] is dependent on CLI parsing and shouldn't be loaded until +/// In the common path, the [`GlobalContext`] is dependent on CLI parsing and shouldn't be loaded until /// after that is done but some other paths (like fix or earlier errors) might need access to it, /// so this provides a way to share the instance and the implementation across these different /// accesses. 
-pub struct LazyConfig { - config: Option, +pub struct LazyContext { + gctx: Option, } -impl LazyConfig { +impl LazyContext { pub fn new() -> Self { - Self { config: None } + Self { gctx: None } } /// Check whether the config is loaded /// /// This is useful for asserts in case the environment needs to be setup before loading pub fn is_init(&self) -> bool { - self.config.is_some() + self.gctx.is_some() } /// Get the config, loading it if needed /// /// On error, the process is terminated - pub fn get(&mut self) -> &Config { + pub fn get(&mut self) -> &GlobalContext { self.get_mut() } /// Get the config, loading it if needed /// /// On error, the process is terminated - pub fn get_mut(&mut self) -> &mut Config { - self.config.get_or_insert_with(|| match Config::default() { - Ok(cfg) => cfg, - Err(e) => { - let mut shell = Shell::new(); - cargo::exit_with_error(e.into(), &mut shell) - } - }) + pub fn get_mut(&mut self) -> &mut GlobalContext { + self.gctx + .get_or_insert_with(|| match GlobalContext::default() { + Ok(cfg) => cfg, + Err(e) => { + let mut shell = Shell::new(); + cargo::exit_with_error(e.into(), &mut shell) + } + }) } } diff --git a/src/bin/cargo/commands/add.rs b/src/bin/cargo/commands/add.rs index f4d2ee051c4..ca75e734622 100644 --- a/src/bin/cargo/commands/add.rs +++ b/src/bin/cargo/commands/add.rs @@ -167,11 +167,11 @@ Build-dependencies are the only dependencies available for use by build scripts ]) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let dry_run = args.dry_run(); let section = parse_section(args); - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; @@ -203,10 +203,10 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { } }; - let dependencies = parse_dependencies(config, args)?; + let dependencies = parse_dependencies(gctx, 
args)?; let ignore_rust_version = args.flag("ignore-rust-version"); - if ignore_rust_version && !config.cli_unstable().msrv_policy { + if ignore_rust_version && !gctx.cli_unstable().msrv_policy { return Err(CliError::new( anyhow::format_err!( "`--ignore-rust-version` is unstable; pass `-Zmsrv-policy` to enable support for it" @@ -217,7 +217,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let honor_rust_version = !ignore_rust_version; let options = AddOptions { - config, + gctx, spec, dependencies, section, @@ -228,21 +228,21 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { if !dry_run { // Reload the workspace since we've changed dependencies - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; resolve_ws(&ws)?; } Ok(()) } -fn parse_dependencies(config: &Config, matches: &ArgMatches) -> CargoResult> { +fn parse_dependencies(gctx: &GlobalContext, matches: &ArgMatches) -> CargoResult> { let path = matches.get_one::("path"); let git = matches.get_one::("git"); let branch = matches.get_one::("branch"); let rev = matches.get_one::("rev"); let tag = matches.get_one::("tag"); let rename = matches.get_one::("rename"); - let registry = match matches.registry(config)? { + let registry = match matches.registry(gctx)? 
{ Some(reg) if reg == CRATES_IO_REGISTRY => None, reg => reg, }; diff --git a/src/bin/cargo/commands/bench.rs b/src/bin/cargo/commands/bench.rs index 11bcf2eb9c6..9006ee18d86 100644 --- a/src/bin/cargo/commands/bench.rs +++ b/src/bin/cargo/commands/bench.rs @@ -56,18 +56,14 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Bench, - Some(&ws), - ProfileChecking::Custom, - )?; + let mut compile_opts = + args.compile_options(gctx, CompileMode::Bench, Some(&ws), ProfileChecking::Custom)?; compile_opts.build_config.requested_profile = - args.get_profile_name(config, "bench", ProfileChecking::Custom)?; + args.get_profile_name(gctx, "bench", ProfileChecking::Custom)?; let ops = TestOptions { no_run: args.flag("no-run"), diff --git a/src/bin/cargo/commands/build.rs b/src/bin/cargo/commands/build.rs index 0dde7bde9b7..8aba68b225a 100644 --- a/src/bin/cargo/commands/build.rs +++ b/src/bin/cargo/commands/build.rs @@ -45,25 +45,19 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Build, - Some(&ws), - ProfileChecking::Custom, - )?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; + let mut compile_opts = + args.compile_options(gctx, CompileMode::Build, Some(&ws), ProfileChecking::Custom)?; - if let Some(out_dir) = args.value_of_path("out-dir", config) { + if let Some(out_dir) = args.value_of_path("out-dir", gctx) { compile_opts.build_config.export_dir = Some(out_dir); - } else if let Some(out_dir) = config.build_config()?.out_dir.as_ref() { - let out_dir = out_dir.resolve_path(config); + } else if let 
Some(out_dir) = gctx.build_config()?.out_dir.as_ref() { + let out_dir = out_dir.resolve_path(gctx); compile_opts.build_config.export_dir = Some(out_dir); } if compile_opts.build_config.export_dir.is_some() { - config - .cli_unstable() - .fail_if_stable_opt("--out-dir", 6790)?; + gctx.cli_unstable().fail_if_stable_opt("--out-dir", 6790)?; } ops::compile(&ws, &compile_opts)?; Ok(()) diff --git a/src/bin/cargo/commands/check.rs b/src/bin/cargo/commands/check.rs index 199cbf3fe86..f72110165a9 100644 --- a/src/bin/cargo/commands/check.rs +++ b/src/bin/cargo/commands/check.rs @@ -42,8 +42,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; // This is a legacy behavior that causes `cargo check` to pass `--test`. let test = matches!( args.get_one::("profile").map(String::as_str), @@ -51,7 +51,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { ); let mode = CompileMode::Check { test }; let compile_opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; + args.compile_options(gctx, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; ops::compile(&ws, &compile_opts)?; Ok(()) diff --git a/src/bin/cargo/commands/clean.rs b/src/bin/cargo/commands/clean.rs index c7b7f98c301..e358b967150 100644 --- a/src/bin/cargo/commands/clean.rs +++ b/src/bin/cargo/commands/clean.rs @@ -125,10 +125,10 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { match args.subcommand() { Some(("gc", args)) => { - return gc(config, args); + return gc(gctx, args); } Some((cmd, _)) => { unreachable!("unexpected command {}", cmd) @@ -136,17 +136,17 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { None => {} } - let ws = 
args.workspace(config)?; + let ws = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; } let opts = CleanOptions { - config, + gctx, spec: values(args, "package"), targets: args.targets()?, - requested_profile: args.get_profile_name(config, "dev", ProfileChecking::Custom)?, + requested_profile: args.get_profile_name(gctx, "dev", ProfileChecking::Custom)?, profile_specified: args.contains_id("profile") || args.flag("release"), doc: args.flag("doc"), dry_run: args.dry_run(), @@ -155,13 +155,13 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { Ok(()) } -fn gc(config: &Config, args: &ArgMatches) -> CliResult { - config.cli_unstable().fail_if_stable_command( - config, +fn gc(gctx: &GlobalContext, args: &ArgMatches) -> CliResult { + gctx.cli_unstable().fail_if_stable_command( + gctx, "clean gc", 12633, "gc", - config.cli_unstable().gc, + gctx.cli_unstable().gc, )?; let size_opt = |opt| -> Option { args.get_one::(opt).copied() }; @@ -183,13 +183,13 @@ fn gc(config: &Config, args: &ArgMatches) -> CliResult { // If the user sets any options, then only perform the options requested. // If no options are set, do the default behavior. 
if !gc_opts.is_download_cache_opt_set() { - gc_opts.update_for_auto_gc(config)?; + gc_opts.update_for_auto_gc(gctx)?; } - let _lock = config.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?; - let mut cache_track = GlobalCacheTracker::new(&config)?; - let mut gc = Gc::new(config, &mut cache_track)?; - let mut clean_ctx = CleanContext::new(config); + let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?; + let mut cache_track = GlobalCacheTracker::new(&gctx)?; + let mut gc = Gc::new(gctx, &mut cache_track)?; + let mut clean_ctx = CleanContext::new(gctx); clean_ctx.dry_run = args.dry_run(); gc.gc(&mut clean_ctx, &gc_opts)?; clean_ctx.display_summary()?; diff --git a/src/bin/cargo/commands/config.rs b/src/bin/cargo/commands/config.rs index feea9ed2876..ed120143d1a 100644 --- a/src/bin/cargo/commands/config.rs +++ b/src/bin/cargo/commands/config.rs @@ -30,13 +30,13 @@ pub fn cli() -> Command { ) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - config.cli_unstable().fail_if_stable_command( - config, +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + gctx.cli_unstable().fail_if_stable_command( + gctx, "config", 9301, "unstable-options", - config.cli_unstable().unstable_options, + gctx.cli_unstable().unstable_options, )?; match args.subcommand() { Some(("get", args)) => { @@ -46,7 +46,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { show_origin: args.flag("show-origin"), merged: args.get_one::("merged").map(String::as_str) == Some("yes"), }; - cargo_config::get(config, &opts)?; + cargo_config::get(gctx, &opts)?; } Some((cmd, _)) => { unreachable!("unexpected command {}", cmd) diff --git a/src/bin/cargo/commands/doc.rs b/src/bin/cargo/commands/doc.rs index de918fb1f24..2b087e2dbb3 100644 --- a/src/bin/cargo/commands/doc.rs +++ b/src/bin/cargo/commands/doc.rs @@ -45,14 +45,13 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> 
CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; let mode = CompileMode::Doc { deps: !args.flag("no-deps"), json: false, }; - let mut compile_opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::Custom)?; + let mut compile_opts = args.compile_options(gctx, mode, Some(&ws), ProfileChecking::Custom)?; compile_opts.rustdoc_document_private_items = args.flag("document-private-items"); let doc_opts = DocOptions { diff --git a/src/bin/cargo/commands/fetch.rs b/src/bin/cargo/commands/fetch.rs index 1c25204e372..f60ed61b854 100644 --- a/src/bin/cargo/commands/fetch.rs +++ b/src/bin/cargo/commands/fetch.rs @@ -14,11 +14,11 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; let opts = FetchOptions { - config, + gctx, targets: args.targets()?, }; let _ = ops::fetch(&ws, &opts)?; diff --git a/src/bin/cargo/commands/fix.rs b/src/bin/cargo/commands/fix.rs index 93df738e137..174df5fa3d9 100644 --- a/src/bin/cargo/commands/fix.rs +++ b/src/bin/cargo/commands/fix.rs @@ -59,8 +59,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; // This is a legacy behavior that causes `cargo fix` to pass `--test`. let test = matches!( args.get_one::("profile").map(String::as_str), @@ -70,8 +70,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { // Unlike other commands default `cargo fix` to all targets to fix as much // code as we can. 
- let mut opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; + let mut opts = args.compile_options(gctx, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; if !opts.filter.is_specific() { // cargo fix with no target selection implies `--all-targets`. diff --git a/src/bin/cargo/commands/generate_lockfile.rs b/src/bin/cargo/commands/generate_lockfile.rs index 3617e38f4b0..d1a95fda036 100644 --- a/src/bin/cargo/commands/generate_lockfile.rs +++ b/src/bin/cargo/commands/generate_lockfile.rs @@ -12,8 +12,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; ops::generate_lockfile(&ws)?; Ok(()) } diff --git a/src/bin/cargo/commands/git_checkout.rs b/src/bin/cargo/commands/git_checkout.rs index 90be9bc5518..0f309805651 100644 --- a/src/bin/cargo/commands/git_checkout.rs +++ b/src/bin/cargo/commands/git_checkout.rs @@ -9,6 +9,6 @@ pub fn cli() -> Command { .override_help(REMOVED) } -pub fn exec(_config: &mut Config, _args: &ArgMatches) -> CliResult { +pub fn exec(_gctx: &mut GlobalContext, _args: &ArgMatches) -> CliResult { Err(anyhow::format_err!(REMOVED).into()) } diff --git a/src/bin/cargo/commands/help.rs b/src/bin/cargo/commands/help.rs index 7f4fe7a34e3..86e602d108c 100644 --- a/src/bin/cargo/commands/help.rs +++ b/src/bin/cargo/commands/help.rs @@ -1,7 +1,7 @@ use crate::aliased_command; use crate::command_prelude::*; -use cargo::drop_println; use cargo::util::errors::CargoResult; +use cargo::{drop_println, GlobalContext}; use cargo_util::paths::resolve_executable; use flate2::read::GzDecoder; use std::ffi::OsStr; @@ -18,20 +18,20 @@ pub fn cli() -> Command { .arg(Arg::new("COMMAND").action(ArgAction::Set)) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> 
CliResult { let subcommand = args.get_one::("COMMAND"); if let Some(subcommand) = subcommand { - if !try_help(config, subcommand)? { + if !try_help(gctx, subcommand)? { match check_builtin(&subcommand) { Some(s) => { crate::execute_internal_subcommand( - config, + gctx, &[OsStr::new(s), OsStr::new("--help")], )?; } None => { crate::execute_external_subcommand( - config, + gctx, subcommand, &[OsStr::new(subcommand), OsStr::new("--help")], )?; @@ -45,12 +45,12 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { Ok(()) } -fn try_help(config: &Config, subcommand: &str) -> CargoResult { - let subcommand = match check_alias(config, subcommand) { +fn try_help(gctx: &GlobalContext, subcommand: &str) -> CargoResult { + let subcommand = match check_alias(gctx, subcommand) { // If this alias is more than a simple subcommand pass-through, show the alias. Some(argv) if argv.len() > 1 => { let alias = argv.join(" "); - drop_println!(config, "`{}` is aliased to `{}`", subcommand, alias); + drop_println!(gctx, "`{}` is aliased to `{}`", subcommand, alias); return Ok(true); } // Otherwise, resolve the alias into its subcommand. @@ -92,8 +92,8 @@ fn try_help(config: &Config, subcommand: &str) -> CargoResult { /// Checks if the given subcommand is an alias. /// /// Returns None if it is not an alias. -fn check_alias(config: &Config, subcommand: &str) -> Option> { - aliased_command(config, subcommand).ok().flatten() +fn check_alias(gctx: &GlobalContext, subcommand: &str) -> Option> { + aliased_command(gctx, subcommand).ok().flatten() } /// Checks if the given subcommand is a built-in command (not via an alias). 
diff --git a/src/bin/cargo/commands/init.rs b/src/bin/cargo/commands/init.rs index 763e410e5d7..614a200799d 100644 --- a/src/bin/cargo/commands/init.rs +++ b/src/bin/cargo/commands/init.rs @@ -19,8 +19,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let opts = args.new_options(config)?; - ops::init(&opts, config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let opts = args.new_options(gctx)?; + ops::init(&opts, gctx)?; Ok(()) } diff --git a/src/bin/cargo/commands/install.rs b/src/bin/cargo/commands/install.rs index 04639fbcd55..86146098067 100644 --- a/src/bin/cargo/commands/install.rs +++ b/src/bin/cargo/commands/install.rs @@ -99,13 +99,13 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let path = args.value_of_path("path", config); +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let path = args.value_of_path("path", gctx); if let Some(path) = &path { - config.reload_rooted_at(path)?; + gctx.reload_rooted_at(path)?; } else { // TODO: Consider calling set_search_stop_path(home). - config.reload_rooted_at(config.home().clone().into_path_unlocked())?; + gctx.reload_rooted_at(gctx.home().clone().into_path_unlocked())?; } // In general, we try to avoid normalizing paths in Cargo, @@ -161,14 +161,14 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { SourceId::for_path(path)? } else if krates.is_empty() { from_cwd = true; - SourceId::for_path(config.cwd())? - } else if let Some(reg_or_index) = args.registry_or_index(config)? { + SourceId::for_path(gctx.cwd())? + } else if let Some(reg_or_index) = args.registry_or_index(gctx)? 
{ match reg_or_index { - ops::RegistryOrIndex::Registry(r) => SourceId::alt_registry(config, &r)?, + ops::RegistryOrIndex::Registry(r) => SourceId::alt_registry(gctx, &r)?, ops::RegistryOrIndex::Index(url) => SourceId::for_registry(&url)?, } } else { - SourceId::crates_io(config)? + SourceId::crates_io(gctx)? }; let root = args.get_one::("root").map(String::as_str); @@ -181,28 +181,28 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { // This workspace information is for emitting helpful messages from // `ArgMatchesExt::compile_options` and won't affect the actual compilation. let workspace = if from_cwd { - args.workspace(config).ok() + args.workspace(gctx).ok() } else if let Some(path) = &path { - Workspace::new(&path.join("Cargo.toml"), config).ok() + Workspace::new(&path.join("Cargo.toml"), gctx).ok() } else { None }; let mut compile_opts = args.compile_options( - config, + gctx, CompileMode::Build, workspace.as_ref(), ProfileChecking::Custom, )?; compile_opts.build_config.requested_profile = - args.get_profile_name(config, "release", ProfileChecking::Custom)?; + args.get_profile_name(gctx, "release", ProfileChecking::Custom)?; if args.flag("list") { - ops::install_list(root, config)?; + ops::install_list(root, gctx)?; } else { ops::install( - config, + gctx, root, krates, source, diff --git a/src/bin/cargo/commands/locate_project.rs b/src/bin/cargo/commands/locate_project.rs index 1f1b87e2e0c..d38d70c0f13 100644 --- a/src/bin/cargo/commands/locate_project.rs +++ b/src/bin/cargo/commands/locate_project.rs @@ -26,16 +26,16 @@ pub struct ProjectLocation<'a> { root: &'a str, } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let root_manifest; let workspace; let root = match WhatToFind::parse(args) { WhatToFind::CurrentManifest => { - root_manifest = args.root_manifest(config)?; + root_manifest = args.root_manifest(gctx)?; &root_manifest } 
WhatToFind::Workspace => { - workspace = args.workspace(config)?; + workspace = args.workspace(gctx)?; workspace.root_manifest() } }; @@ -53,8 +53,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let location = ProjectLocation { root }; match MessageFormat::parse(args)? { - MessageFormat::Json => config.shell().print_json(&location)?, - MessageFormat::Plain => drop_println!(config, "{}", location.root), + MessageFormat::Json => gctx.shell().print_json(&location)?, + MessageFormat::Plain => drop_println!(gctx, "{}", location.root), } Ok(()) diff --git a/src/bin/cargo/commands/login.rs b/src/bin/cargo/commands/login.rs index d6fc6d55d9f..2542386aa56 100644 --- a/src/bin/cargo/commands/login.rs +++ b/src/bin/cargo/commands/login.rs @@ -20,8 +20,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let reg = args.registry_or_index(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let reg = args.registry_or_index(gctx)?; assert!( !matches!(reg, Some(RegistryOrIndex::Index(..))), "must not be index URL" @@ -33,7 +33,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { .map(String::as_str) .collect::>(); ops::registry_login( - config, + gctx, args.get_one::("token").map(|s| s.as_str().into()), reg.as_ref(), &extra_args, diff --git a/src/bin/cargo/commands/logout.rs b/src/bin/cargo/commands/logout.rs index cef9311a88d..57df53ad737 100644 --- a/src/bin/cargo/commands/logout.rs +++ b/src/bin/cargo/commands/logout.rs @@ -13,13 +13,13 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let reg = args.registry_or_index(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let reg = args.registry_or_index(gctx)?; assert!( !matches!(reg, Some(RegistryOrIndex::Index(..))), "must not be index URL" ); - ops::registry_logout(config, reg)?; + ops::registry_logout(gctx, reg)?; 
Ok(()) } diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs index 6642114209c..83232ef47f2 100644 --- a/src/bin/cargo/commands/metadata.rs +++ b/src/bin/cargo/commands/metadata.rs @@ -31,12 +31,12 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; let version = match args.get_one::("format-version") { None => { - config.shell().warn( + gctx.shell().warn( "please specify `--format-version` flag explicitly \ to avoid compatibility problems", )?; @@ -53,6 +53,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { }; let result = ops::output_metadata(&ws, &options)?; - config.shell().print_json(&result)?; + gctx.shell().print_json(&result)?; Ok(()) } diff --git a/src/bin/cargo/commands/mod.rs b/src/bin/cargo/commands/mod.rs index b9da0e5fb35..02c3438dc47 100644 --- a/src/bin/cargo/commands/mod.rs +++ b/src/bin/cargo/commands/mod.rs @@ -43,7 +43,7 @@ pub fn builtin() -> Vec { ] } -pub type Exec = fn(&mut Config, &ArgMatches) -> CliResult; +pub type Exec = fn(&mut GlobalContext, &ArgMatches) -> CliResult; pub fn builtin_exec(cmd: &str) -> Option { let f = match cmd { diff --git a/src/bin/cargo/commands/new.rs b/src/bin/cargo/commands/new.rs index e47ed523a77..35a85e0499b 100644 --- a/src/bin/cargo/commands/new.rs +++ b/src/bin/cargo/commands/new.rs @@ -19,9 +19,9 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let opts = args.new_options(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let opts = args.new_options(gctx)?; - ops::new(&opts, config)?; + ops::new(&opts, gctx)?; Ok(()) } diff --git a/src/bin/cargo/commands/owner.rs b/src/bin/cargo/commands/owner.rs index 45f34bc8e80..659408758df 100644 --- a/src/bin/cargo/commands/owner.rs +++ 
b/src/bin/cargo/commands/owner.rs @@ -33,11 +33,11 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let opts = OwnersOptions { krate: args.get_one::("crate").cloned(), token: args.get_one::("token").cloned().map(Secret::from), - reg_or_index: args.registry_or_index(config)?, + reg_or_index: args.registry_or_index(gctx)?, to_add: args .get_many::("add") .map(|xs| xs.cloned().collect()), @@ -46,6 +46,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { .map(|xs| xs.cloned().collect()), list: args.flag("list"), }; - ops::modify_owners(config, &opts)?; + ops::modify_owners(gctx, &opts)?; Ok(()) } diff --git a/src/bin/cargo/commands/package.rs b/src/bin/cargo/commands/package.rs index 59a3c8f667f..27b48097c6a 100644 --- a/src/bin/cargo/commands/package.rs +++ b/src/bin/cargo/commands/package.rs @@ -40,8 +40,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; if ws.root_maybe().is_embedded() { return Err(anyhow::format_err!( "{} is unsupported by `cargo package`", @@ -54,7 +54,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { ops::package( &ws, &PackageOpts { - config, + gctx, verify: !args.flag("no-verify"), list: args.flag("list"), check_metadata: !args.flag("no-metadata"), diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs index f1494af0075..72abbfc0788 100644 --- a/src/bin/cargo/commands/pkgid.rs +++ b/src/bin/cargo/commands/pkgid.rs @@ -15,8 +15,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = 
args.workspace(gctx)?; if ws.root_maybe().is_embedded() { return Err(anyhow::format_err!( "{} is unsupported by `cargo pkgid`", @@ -32,6 +32,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { .or_else(|| args.get_one::("package")) .map(String::as_str); let spec = ops::pkgid(&ws, spec)?; - cargo::drop_println!(config, "{}", spec); + cargo::drop_println!(gctx, "{}", spec); Ok(()) } diff --git a/src/bin/cargo/commands/publish.rs b/src/bin/cargo/commands/publish.rs index af5bf744785..3b497e1ed12 100644 --- a/src/bin/cargo/commands/publish.rs +++ b/src/bin/cargo/commands/publish.rs @@ -29,9 +29,9 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let reg_or_index = args.registry_or_index(config)?; - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let reg_or_index = args.registry_or_index(gctx)?; + let ws = args.workspace(gctx)?; if ws.root_maybe().is_embedded() { return Err(anyhow::format_err!( "{} is unsupported by `cargo publish`", @@ -43,7 +43,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { ops::publish( &ws, &PublishOpts { - config, + gctx, token: args .get_one::("token") .map(|s| s.to_string().into()), diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs index 8cfd9b34e8b..b86bbf795bc 100644 --- a/src/bin/cargo/commands/read_manifest.rs +++ b/src/bin/cargo/commands/read_manifest.rs @@ -13,8 +13,8 @@ Deprecated, use `cargo metadata --no-deps` instead.\ .arg_manifest_path() } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; - config.shell().print_json(&ws.current()?.serialized())?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; + gctx.shell().print_json(&ws.current()?.serialized())?; Ok(()) } diff --git a/src/bin/cargo/commands/remove.rs 
b/src/bin/cargo/commands/remove.rs index b7abb171510..34e31e9fae4 100644 --- a/src/bin/cargo/commands/remove.rs +++ b/src/bin/cargo/commands/remove.rs @@ -56,10 +56,10 @@ pub fn cli() -> clap::Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let dry_run = args.dry_run(); - let workspace = args.workspace(config)?; + let workspace = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&workspace)?; @@ -100,7 +100,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let section = parse_section(args); let options = RemoveOptions { - config, + gctx, spec, dependencies, section, @@ -113,22 +113,22 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { gc_workspace(&workspace)?; // Reload the workspace since we've changed dependencies - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; let resolve = { // HACK: Avoid unused patch warnings by temporarily changing the verbosity. // In rare cases, this might cause index update messages to not show up - let verbosity = ws.config().shell().verbosity(); - ws.config() + let verbosity = ws.gctx().shell().verbosity(); + ws.gctx() .shell() .set_verbosity(cargo::core::Verbosity::Quiet); let resolve = resolve_ws(&ws); - ws.config().shell().set_verbosity(verbosity); + ws.gctx().shell().set_verbosity(verbosity); resolve?.1 }; // Attempt to gc unused patches and re-resolve if anything is removed if gc_unused_patches(&workspace, &resolve)? { - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; resolve_ws(&ws)?; } } @@ -243,7 +243,7 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> { if !spec_has_match( &PackageIdSpec::parse(key)?, &dependencies, - workspace.config(), + workspace.gctx(), )? 
{ *item = toml_edit::Item::None; is_modified = true; @@ -262,7 +262,7 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> { if !spec_has_match( &PackageIdSpec::parse(key.get())?, &dependencies, - workspace.config(), + workspace.gctx(), )? { *item = toml_edit::Item::None; is_modified = true; @@ -284,7 +284,7 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> { fn spec_has_match( spec: &PackageIdSpec, dependencies: &[Dependency], - config: &Config, + gctx: &GlobalContext, ) -> CargoResult { for dep in dependencies { if spec.name() != &dep.name { @@ -300,7 +300,7 @@ fn spec_has_match( continue; } - match dep.source_id(config)? { + match dep.source_id(gctx)? { MaybeWorkspace::Other(source_id) => { if spec.url().map(|u| u == source_id.url()).unwrap_or(true) { return Ok(true); @@ -332,7 +332,7 @@ fn gc_unused_patches(workspace: &Workspace<'_>, resolve: &Resolve) -> CargoResul // Generate a PackageIdSpec url for querying let url = if let MaybeWorkspace::Other(source_id) = - dep.source_id(workspace.config())? + dep.source_id(workspace.gctx())? 
{ format!("{}#{}", source_id.url(), dep.name) } else { diff --git a/src/bin/cargo/commands/report.rs b/src/bin/cargo/commands/report.rs index bc00d5bb830..75918699b1e 100644 --- a/src/bin/cargo/commands/report.rs +++ b/src/bin/cargo/commands/report.rs @@ -25,9 +25,9 @@ pub fn cli() -> Command { ) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { match args.subcommand() { - Some(("future-incompatibilities", args)) => report_future_incompatibilities(config, args), + Some(("future-incompatibilities", args)) => report_future_incompatibilities(gctx, args), Some((cmd, _)) => { unreachable!("unexpected command {}", cmd) } @@ -37,15 +37,15 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { } } -fn report_future_incompatibilities(config: &Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +fn report_future_incompatibilities(gctx: &GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; let reports = OnDiskReports::load(&ws)?; let id = args .value_of_u32("id")? 
.unwrap_or_else(|| reports.last_id()); let krate = args.get_one::("package").map(String::as_str); let report = reports.get_report(id, krate)?; - drop_println!(config, "{}", REPORT_PREAMBLE); - drop(config.shell().print_ansi_stdout(report.as_bytes())); + drop_println!(gctx, "{}", REPORT_PREAMBLE); + drop(gctx.shell().print_ansi_stdout(report.as_bytes())); Ok(()) } diff --git a/src/bin/cargo/commands/run.rs b/src/bin/cargo/commands/run.rs index 50153ceeefd..981eac9d2de 100644 --- a/src/bin/cargo/commands/run.rs +++ b/src/bin/cargo/commands/run.rs @@ -46,15 +46,11 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Build, - Some(&ws), - ProfileChecking::Custom, - )?; + let mut compile_opts = + args.compile_options(gctx, CompileMode::Build, Some(&ws), ProfileChecking::Custom)?; // Disallow `spec` to be an glob pattern if let Packages::Packages(opt_in) = &compile_opts.spec { @@ -87,7 +83,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { } }; - ops::run(&ws, &compile_opts, &values_os(args, "args")).map_err(|err| to_run_error(config, err)) + ops::run(&ws, &compile_opts, &values_os(args, "args")).map_err(|err| to_run_error(gctx, err)) } /// See also `util/toml/mod.rs`s `is_embedded` @@ -98,15 +94,15 @@ pub fn is_manifest_command(arg: &str) -> bool { || path.file_name() == Some(OsStr::new("Cargo.toml")) } -pub fn exec_manifest_command(config: &mut Config, cmd: &str, args: &[OsString]) -> CliResult { +pub fn exec_manifest_command(gctx: &mut GlobalContext, cmd: &str, args: &[OsString]) -> CliResult { let manifest_path = Path::new(cmd); - match (manifest_path.is_file(), config.cli_unstable().script) { + match (manifest_path.is_file(), gctx.cli_unstable().script) { (true, true) => {} (true, 
false) => { return Err(anyhow::anyhow!("running the file `{cmd}` requires `-Zscript`").into()); } (false, true) => { - let possible_commands = crate::list_commands(config); + let possible_commands = crate::list_commands(gctx); let is_dir = if manifest_path.is_dir() { format!("\n\t`{cmd}` is a directory") } else { @@ -143,7 +139,7 @@ pub fn exec_manifest_command(config: &mut Config, cmd: &str, args: &[OsString]) (false, false) => { // HACK: duplicating the above for minor tweaks but this will all go away on // stabilization - let possible_commands = crate::list_commands(config); + let possible_commands = crate::list_commands(gctx); let suggested_command = if let Some(suggested_command) = possible_commands .keys() .filter(|c| cmd.starts_with(c.as_str())) @@ -174,25 +170,25 @@ pub fn exec_manifest_command(config: &mut Config, cmd: &str, args: &[OsString]) } } - let manifest_path = root_manifest(Some(manifest_path), config)?; + let manifest_path = root_manifest(Some(manifest_path), gctx)?; // Treat `cargo foo.rs` like `cargo install --path foo` and re-evaluate the config based on the // location where the script resides, rather than the environment from where it's being run. 
let parent_path = manifest_path .parent() .expect("a file should always have a parent"); - config.reload_rooted_at(parent_path)?; + gctx.reload_rooted_at(parent_path)?; - let mut ws = Workspace::new(&manifest_path, config)?; - if config.cli_unstable().avoid_dev_deps { + let mut ws = Workspace::new(&manifest_path, gctx)?; + if gctx.cli_unstable().avoid_dev_deps { ws.set_require_optional_deps(false); } let mut compile_opts = - cargo::ops::CompileOptions::new(config, cargo::core::compiler::CompileMode::Build)?; + cargo::ops::CompileOptions::new(gctx, cargo::core::compiler::CompileMode::Build)?; compile_opts.spec = cargo::ops::Packages::Default; - cargo::ops::run(&ws, &compile_opts, args).map_err(|err| to_run_error(config, err)) + cargo::ops::run(&ws, &compile_opts, args).map_err(|err| to_run_error(gctx, err)) } fn suggested_script(cmd: &str) -> Option { @@ -228,7 +224,7 @@ fn suggested_script(cmd: &str) -> Option { } } -fn to_run_error(config: &cargo::util::Config, err: anyhow::Error) -> CliError { +fn to_run_error(gctx: &GlobalContext, err: anyhow::Error) -> CliError { let proc_err = match err.downcast_ref::() { Some(e) => e, None => return CliError::new(err, 101), @@ -244,7 +240,7 @@ fn to_run_error(config: &cargo::util::Config, err: anyhow::Error) -> CliError { // If `-q` was passed then we suppress extra error information about // a failed process, we assume the process itself printed out enough // information about why it failed so we don't do so as well - let is_quiet = config.shell().verbosity() == Verbosity::Quiet; + let is_quiet = gctx.shell().verbosity() == Verbosity::Quiet; if is_quiet { CliError::code(exit_code) } else { diff --git a/src/bin/cargo/commands/rustc.rs b/src/bin/cargo/commands/rustc.rs index 7e5370be3a5..e4dd308c092 100644 --- a/src/bin/cargo/commands/rustc.rs +++ b/src/bin/cargo/commands/rustc.rs @@ -58,8 +58,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = 
args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; // This is a legacy behavior that changes the behavior based on the profile. // If we want to support this more formally, I think adding a --mode flag // would be warranted. @@ -70,7 +70,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { _ => CompileMode::Build, }; let mut compile_opts = args.compile_options_for_single_package( - config, + gctx, mode, Some(&ws), ProfileChecking::LegacyRustc, @@ -85,8 +85,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { Some(target_args) }; if let Some(opt_value) = args.get_one::(PRINT_ARG_NAME) { - config - .cli_unstable() + gctx.cli_unstable() .fail_if_stable_opt(PRINT_ARG_NAME, 9357)?; ops::print(&ws, &compile_opts, opt_value)?; return Ok(()); diff --git a/src/bin/cargo/commands/rustdoc.rs b/src/bin/cargo/commands/rustdoc.rs index 600247d0e71..bc9fa000e60 100644 --- a/src/bin/cargo/commands/rustdoc.rs +++ b/src/bin/cargo/commands/rustdoc.rs @@ -51,11 +51,10 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; let output_format = if let Some(output_format) = args._value_of("output-format") { - config - .cli_unstable() + gctx.cli_unstable() .fail_if_stable_opt("--output-format", 12103)?; output_format.parse()? 
} else { @@ -63,7 +62,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { }; let mut compile_opts = args.compile_options_for_single_package( - config, + gctx, CompileMode::Doc { deps: false, json: matches!(output_format, OutputFormat::Json), diff --git a/src/bin/cargo/commands/search.rs b/src/bin/cargo/commands/search.rs index 77394242bf3..f51f59d2dd9 100644 --- a/src/bin/cargo/commands/search.rs +++ b/src/bin/cargo/commands/search.rs @@ -23,8 +23,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let reg_or_index = args.registry_or_index(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let reg_or_index = args.registry_or_index(gctx)?; let limit = args.value_of_u32("limit")?; let limit = min(100, limit.unwrap_or(10)); let query: Vec<&str> = args @@ -33,6 +33,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { .map(String::as_str) .collect(); let query: String = query.join("+"); - ops::search(&query, config, reg_or_index, limit)?; + ops::search(&query, gctx, reg_or_index, limit)?; Ok(()) } diff --git a/src/bin/cargo/commands/test.rs b/src/bin/cargo/commands/test.rs index 2db032212ed..4b70d42b5ca 100644 --- a/src/bin/cargo/commands/test.rs +++ b/src/bin/cargo/commands/test.rs @@ -64,18 +64,14 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Test, - Some(&ws), - ProfileChecking::Custom, - )?; + let mut compile_opts = + args.compile_options(gctx, CompileMode::Test, Some(&ws), ProfileChecking::Custom)?; compile_opts.build_config.requested_profile = - args.get_profile_name(config, "test", ProfileChecking::Custom)?; + args.get_profile_name(gctx, "test", ProfileChecking::Custom)?; 
// `TESTNAME` is actually an argument of the test binary, but it's // important, so we explicitly mention it and reconfigure. diff --git a/src/bin/cargo/commands/tree.rs b/src/bin/cargo/commands/tree.rs index 30cf4fe3a2d..aa1b526758d 100644 --- a/src/bin/cargo/commands/tree.rs +++ b/src/bin/cargo/commands/tree.rs @@ -101,21 +101,19 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { if args.flag("version") { let verbose = args.verbose() > 0; let version = cli::get_version_string(verbose); - cargo::drop_print!(config, "{}", version); + cargo::drop_print!(gctx, "{}", version); return Ok(()); } let prefix = if args.flag("no-indent") { - config - .shell() + gctx.shell() .warn("the --no-indent flag has been changed to --prefix=none")?; "none" } else if args.flag("prefix-depth") { - config - .shell() + gctx.shell() .warn("the --prefix-depth flag has been changed to --prefix=depth")?; "depth" } else { @@ -125,7 +123,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let no_dedupe = args.flag("no-dedupe") || args.flag("all"); if args.flag("all") { - config.shell().warn( + gctx.shell().warn( "The `cargo tree` --all flag has been changed to --no-dedupe, \ and may be removed in a future version.\n\ If you are looking to display all workspace members, use the --workspace flag.", @@ -133,8 +131,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { } let targets = if args.flag("all-targets") { - config - .shell() + gctx.shell() .warn("the --all-targets flag has been changed to --target=all")?; vec!["all".to_string()] } else { @@ -142,7 +139,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { }; let target = tree::Target::from_cli(targets); - let (edge_kinds, no_proc_macro) = parse_edge_kinds(config, args)?; + let (edge_kinds, no_proc_macro) = parse_edge_kinds(gctx, args)?; let graph_features = 
edge_kinds.contains(&EdgeKind::Feature); let pkgs_to_prune = args._values_of("prune"); @@ -178,7 +175,7 @@ subtree of the package given to -p.\n\ } } - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; @@ -214,7 +211,10 @@ subtree of the package given to -p.\n\ /// Parses `--edges` option. /// /// Returns a tuple of `EdgeKind` map and `no_proc_marco` flag. -fn parse_edge_kinds(config: &Config, args: &ArgMatches) -> CargoResult<(HashSet, bool)> { +fn parse_edge_kinds( + gctx: &GlobalContext, + args: &ArgMatches, +) -> CargoResult<(HashSet, bool)> { let (kinds, no_proc_macro) = { let mut no_proc_macro = false; let mut kinds = args.get_many::("edges").map_or_else( @@ -230,8 +230,7 @@ fn parse_edge_kinds(config: &Config, args: &ArgMatches) -> CargoResult<(HashSet< ); if args.flag("no-dev-dependencies") { - config - .shell() + gctx.shell() .warn("the --no-dev-dependencies flag has changed to -e=no-dev")?; kinds.push("no-dev"); } diff --git a/src/bin/cargo/commands/uninstall.rs b/src/bin/cargo/commands/uninstall.rs index 217f22ef334..cad538de9c0 100644 --- a/src/bin/cargo/commands/uninstall.rs +++ b/src/bin/cargo/commands/uninstall.rs @@ -18,7 +18,7 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let root = args.get_one::("root").map(String::as_str); if args.is_present_with_zero_values("package") { @@ -34,6 +34,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { .unwrap_or_else(|| args.get_many::("package").unwrap_or_default()) .map(String::as_str) .collect(); - ops::uninstall(root, specs, &values(args, "bin"), config)?; + ops::uninstall(root, specs, &values(args, "bin"), gctx)?; Ok(()) } diff --git a/src/bin/cargo/commands/update.rs b/src/bin/cargo/commands/update.rs index e11ac45c793..43bd329269c 100644 --- 
a/src/bin/cargo/commands/update.rs +++ b/src/bin/cargo/commands/update.rs @@ -47,8 +47,8 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let ws = args.workspace(config)?; +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let ws = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; @@ -76,7 +76,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { to_update, dry_run: args.dry_run(), workspace: args.flag("workspace"), - config, + gctx, }; ops::update_lockfile(&ws, &update_opts)?; Ok(()) diff --git a/src/bin/cargo/commands/vendor.rs b/src/bin/cargo/commands/vendor.rs index a1587848451..efa1f1bb7b6 100644 --- a/src/bin/cargo/commands/vendor.rs +++ b/src/bin/cargo/commands/vendor.rs @@ -54,16 +54,16 @@ fn unsupported(name: &'static str) -> Arg { flag(name, "").value_parser(value_parser).hide(true) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { // We're doing the vendoring operation ourselves, so we don't actually want // to respect any of the `source` configuration in Cargo itself. That's // intended for other consumers of Cargo, but we want to go straight to the // source, e.g. crates.io, to fetch crates. 
if !args.flag("respect-source-config") { - config.values_mut()?.remove("source"); + gctx.values_mut()?.remove("source"); } - let ws = args.workspace(config)?; + let ws = args.workspace(gctx)?; let path = args .get_one::("path") .cloned() diff --git a/src/bin/cargo/commands/verify_project.rs b/src/bin/cargo/commands/verify_project.rs index 14a5df07d33..d4c655d33b2 100644 --- a/src/bin/cargo/commands/verify_project.rs +++ b/src/bin/cargo/commands/verify_project.rs @@ -13,16 +13,14 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - if let Err(e) = args.workspace(config) { - config - .shell() +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + if let Err(e) = args.workspace(gctx) { + gctx.shell() .print_json(&HashMap::from([("invalid", e.to_string())]))?; process::exit(1) } - config - .shell() + gctx.shell() .print_json(&HashMap::from([("success", "true")]))?; Ok(()) } diff --git a/src/bin/cargo/commands/version.rs b/src/bin/cargo/commands/version.rs index 5a6d710c396..78d3c658fec 100644 --- a/src/bin/cargo/commands/version.rs +++ b/src/bin/cargo/commands/version.rs @@ -10,9 +10,9 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let verbose = args.verbose() > 0; let version = cli::get_version_string(verbose); - cargo::drop_print!(config, "{}", version); + cargo::drop_print!(gctx, "{}", version); Ok(()) } diff --git a/src/bin/cargo/commands/yank.rs b/src/bin/cargo/commands/yank.rs index 8a69d3eb7c5..5401b32de2f 100644 --- a/src/bin/cargo/commands/yank.rs +++ b/src/bin/cargo/commands/yank.rs @@ -25,7 +25,7 @@ pub fn cli() -> Command { )) } -pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let (krate, version) = resolve_crate( args.get_one::("crate").map(String::as_str), 
args.get_one::("version").map(String::as_str), @@ -35,11 +35,11 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { } ops::yank( - config, + gctx, krate.map(|s| s.to_string()), version.map(|s| s.to_string()), args.get_one::("token").cloned().map(Secret::from), - args.registry_or_index(config)?, + args.registry_or_index(gctx)?, args.flag("undo"), )?; Ok(()) diff --git a/src/bin/cargo/main.rs b/src/bin/cargo/main.rs index e90a61b6003..6861237f925 100644 --- a/src/bin/cargo/main.rs +++ b/src/bin/cargo/main.rs @@ -2,7 +2,7 @@ use cargo::util::network::http::http_handle; use cargo::util::network::http::needs_custom_http_transport; -use cargo::util::{self, closest_msg, command_prelude, CargoResult}; +use cargo::util::{self, closest_msg, command_prelude, CargoResult, GlobalContext}; use cargo_util::{ProcessBuilder, ProcessError}; use cargo_util_schemas::manifest::StringOrVec; use std::collections::BTreeMap; @@ -19,17 +19,17 @@ use crate::command_prelude::*; fn main() { setup_logger(); - let mut config = cli::LazyConfig::new(); + let mut lazy_gctx = cli::LazyContext::new(); let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() { - cargo::ops::fix_exec_rustc(config.get(), &lock_addr).map_err(|e| CliError::from(e)) + cargo::ops::fix_exec_rustc(lazy_gctx.get(), &lock_addr).map_err(|e| CliError::from(e)) } else { let _token = cargo::util::job::setup(); - cli::main(&mut config) + cli::main(&mut lazy_gctx) }; match result { - Err(e) => cargo::exit_with_error(e, &mut config.get_mut().shell()), + Err(e) => cargo::exit_with_error(e, &mut lazy_gctx.get_mut().shell()), Ok(()) => {} } } @@ -63,7 +63,7 @@ fn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> { BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd) } -/// Resolve the aliased command from the [`Config`] with a given command string. +/// Resolve the aliased command from the [`GlobalContext`] with a given command string. 
/// /// The search fallback chain is: /// @@ -71,9 +71,9 @@ fn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> { /// 2. If an `Err` occurs (missing key, type mismatch, or any possible error), /// try to get it as an array again. /// 3. If still cannot find any, finds one insides [`BUILTIN_ALIASES`]. -fn aliased_command(config: &Config, command: &str) -> CargoResult>> { +fn aliased_command(gctx: &GlobalContext, command: &str) -> CargoResult>> { let alias_name = format!("alias.{}", command); - let user_alias = match config.get_string(&alias_name) { + let user_alias = match gctx.get_string(&alias_name) { Ok(Some(record)) => Some( record .val @@ -82,7 +82,7 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult None, - Err(_) => config.get::>>(&alias_name)?, + Err(_) => gctx.get::>>(&alias_name)?, }; let result = user_alias.or_else(|| { @@ -92,11 +92,11 @@ fn aliased_command(config: &Config, command: &str) -> CargoResult BTreeMap { +fn list_commands(gctx: &GlobalContext) -> BTreeMap { let prefix = "cargo-"; let suffix = env::consts::EXE_SUFFIX; let mut commands = BTreeMap::new(); - for dir in search_directories(config) { + for dir in search_directories(gctx) { let entries = match fs::read_dir(dir) { Ok(entries) => entries, _ => continue, @@ -142,7 +142,7 @@ fn list_commands(config: &Config) -> BTreeMap { } // Add the user-defined aliases - if let Ok(aliases) = config.get::>("alias") { + if let Ok(aliases) = gctx.get::>("alias") { for (name, target) in aliases.iter() { commands.insert( name.to_string(), @@ -164,20 +164,20 @@ fn list_commands(config: &Config) -> BTreeMap { commands } -fn find_external_subcommand(config: &Config, cmd: &str) -> Option { +fn find_external_subcommand(gctx: &GlobalContext, cmd: &str) -> Option { let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); - search_directories(config) + search_directories(gctx) .iter() .map(|dir| dir.join(&command_exe)) .find(|file| is_executable(file)) } -fn 
execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> CliResult { - let path = find_external_subcommand(config, cmd); +fn execute_external_subcommand(gctx: &GlobalContext, cmd: &str, args: &[&OsStr]) -> CliResult { + let path = find_external_subcommand(gctx, cmd); let command = match path { Some(command) => command, None => { - let script_suggestion = if config.cli_unstable().script + let script_suggestion = if gctx.cli_unstable().script && std::path::Path::new(cmd).is_file() { let sep = std::path::MAIN_SEPARATOR; @@ -192,7 +192,7 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> C Did you mean to invoke `cargo` through `rustup` instead?{script_suggestion}", ) } else { - let suggestions = list_commands(config); + let suggestions = list_commands(gctx); let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c); anyhow::format_err!( @@ -205,24 +205,28 @@ fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&OsStr]) -> C return Err(CliError::new(err, 101)); } }; - execute_subcommand(config, Some(&command), args) + execute_subcommand(gctx, Some(&command), args) } -fn execute_internal_subcommand(config: &Config, args: &[&OsStr]) -> CliResult { - execute_subcommand(config, None, args) +fn execute_internal_subcommand(gctx: &GlobalContext, args: &[&OsStr]) -> CliResult { + execute_subcommand(gctx, None, args) } // This function is used to execute a subcommand. It is used to execute both // internal and external subcommands. // If `cmd_path` is `None`, then the subcommand is an internal subcommand. 
-fn execute_subcommand(config: &Config, cmd_path: Option<&PathBuf>, args: &[&OsStr]) -> CliResult { - let cargo_exe = config.cargo_exe()?; +fn execute_subcommand( + gctx: &GlobalContext, + cmd_path: Option<&PathBuf>, + args: &[&OsStr], +) -> CliResult { + let cargo_exe = gctx.cargo_exe()?; let mut cmd = match cmd_path { Some(cmd_path) => ProcessBuilder::new(cmd_path), None => ProcessBuilder::new(&cargo_exe), }; cmd.env(cargo::CARGO_ENV, cargo_exe).args(args); - if let Some(client) = config.jobserver_from_env() { + if let Some(client) = gctx.jobserver_from_env() { cmd.inherit_jobserver(client); } let err = match cmd.exec_replace() { @@ -250,14 +254,14 @@ fn is_executable>(path: P) -> bool { path.as_ref().is_file() } -fn search_directories(config: &Config) -> Vec { - let mut path_dirs = if let Some(val) = config.get_env_os("PATH") { +fn search_directories(gctx: &GlobalContext) -> Vec { + let mut path_dirs = if let Some(val) = gctx.get_env_os("PATH") { env::split_paths(&val).collect() } else { vec![] }; - let home_bin = config.home().clone().into_path_unlocked().join("bin"); + let home_bin = gctx.home().clone().into_path_unlocked().join("bin"); // If any of that PATH elements contains `home_bin`, do not // add it again. This is so that the users can control priority @@ -276,7 +280,7 @@ fn search_directories(config: &Config) -> Vec { } /// Initialize libgit2. -fn init_git(config: &Config) { +fn init_git(gctx: &GlobalContext) { // Disabling the owner validation in git can, in theory, lead to code execution // vulnerabilities. However, libgit2 does not launch executables, which is the foundation of // the original security issue. Meanwhile, issues with refusing to load git repos in @@ -299,7 +303,7 @@ fn init_git(config: &Config) { .expect("set_verify_owner_validation should never fail"); } - init_git_transports(config); + init_git_transports(gctx); } /// Configure libgit2 to use libcurl if necessary. 
@@ -307,13 +311,13 @@ fn init_git(config: &Config) { /// If the user has a non-default network configuration, then libgit2 will be /// configured to use libcurl instead of the built-in networking support so /// that those configuration settings can be used. -fn init_git_transports(config: &Config) { - match needs_custom_http_transport(config) { +fn init_git_transports(gctx: &GlobalContext) { + match needs_custom_http_transport(gctx) { Ok(true) => {} _ => return, } - let handle = match http_handle(config) { + let handle = match http_handle(gctx) { Ok(handle) => handle, Err(..) => return, }; diff --git a/src/cargo/core/compiler/artifact.rs b/src/cargo/core/compiler/artifact.rs index 1f3b12b5c09..1e0e31fd240 100644 --- a/src/cargo/core/compiler/artifact.rs +++ b/src/cargo/core/compiler/artifact.rs @@ -1,7 +1,7 @@ //! Generate artifact information from unit dependencies for configuring the compiler environment. use crate::core::compiler::unit_graph::UnitDep; -use crate::core::compiler::{Context, CrateType, FileFlavor, Unit}; +use crate::core::compiler::{BuildRunner, CrateType, FileFlavor, Unit}; use crate::core::dependency::ArtifactKind; use crate::core::{Dependency, Target, TargetKind}; use crate::CargoResult; @@ -11,12 +11,12 @@ use std::ffi::OsString; /// Return all environment variables for the given unit-dependencies /// if artifacts are present. pub fn get_env( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, dependencies: &[UnitDep], ) -> CargoResult> { let mut env = HashMap::new(); for unit_dep in dependencies.iter().filter(|d| d.unit.artifact.is_true()) { - for artifact_path in cx + for artifact_path in build_runner .outputs(&unit_dep.unit)? 
.iter() .filter_map(|f| (f.flavor == FileFlavor::Normal).then(|| &f.path)) diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs index 78cbb6e614e..3e5f32cf9ea 100644 --- a/src/cargo/core/compiler/build_config.rs +++ b/src/cargo/core/compiler/build_config.rs @@ -1,7 +1,7 @@ use crate::core::compiler::CompileKind; use crate::util::config::JobsConfig; use crate::util::interning::InternedString; -use crate::util::{CargoResult, Config, RustfixDiagnosticServer}; +use crate::util::{CargoResult, GlobalContext, RustfixDiagnosticServer}; use anyhow::{bail, Context as _}; use cargo_util::ProcessBuilder; use serde::ser; @@ -64,16 +64,16 @@ impl BuildConfig { /// * `target.$target.linker` /// * `target.$target.libfoo.metadata` pub fn new( - config: &Config, + gctx: &GlobalContext, jobs: Option, keep_going: bool, requested_targets: &[String], mode: CompileMode, ) -> CargoResult { - let cfg = config.build_config()?; - let requested_kinds = CompileKind::from_requested_targets(config, requested_targets)?; - if jobs.is_some() && config.jobserver_from_env().is_some() { - config.shell().warn( + let cfg = gctx.build_config()?; + let requested_kinds = CompileKind::from_requested_targets(gctx, requested_targets)?; + if jobs.is_some() && gctx.jobserver_from_env().is_some() { + gctx.shell().warn( "a `-j` argument was passed to Cargo but Cargo is \ also configured with an external jobserver in \ its environment, ignoring the `-j` parameter", @@ -97,7 +97,7 @@ impl BuildConfig { }, }; - if config.cli_unstable().build_std.is_some() && requested_kinds[0].is_host() { + if gctx.cli_unstable().build_std.is_some() && requested_kinds[0].is_host() { // TODO: This should eventually be fixed. 
anyhow::bail!("-Zbuild-std requires --target"); } diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 19dee718b02..26b134acfdb 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -5,7 +5,7 @@ use crate::core::compiler::{BuildConfig, CompileKind, Unit}; use crate::core::profiles::Profiles; use crate::core::PackageSet; use crate::core::Workspace; -use crate::util::config::Config; +use crate::util::config::GlobalContext; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::Rustc; @@ -20,7 +20,7 @@ pub use self::target_info::{ /// before it gets started. /// /// It is intended that this is mostly static information. Stuff that mutates -/// during the build can be found in the parent [`Context`]. (I say mostly, +/// during the build can be found in the parent [`BuildRunner`]. (I say mostly, /// because this has internal caching, but nothing that should be observable /// or require &mut.) /// @@ -39,16 +39,16 @@ pub use self::target_info::{ /// since it is often too lower-level. /// Instead, [`ops::create_bcx`] is usually what you are looking for. /// -/// After a `BuildContext` is built, the next stage of building is handled in [`Context`]. +/// After a `BuildContext` is built, the next stage of building is handled in [`BuildRunner`]. /// -/// [`Context`]: crate::core::compiler::Context +/// [`BuildRunner`]: crate::core::compiler::BuildRunner /// [`ops::create_bcx`]: crate::ops::create_bcx -pub struct BuildContext<'a, 'cfg> { +pub struct BuildContext<'a, 'gctx> { /// The workspace the build is for. - pub ws: &'a Workspace<'cfg>, + pub ws: &'a Workspace<'gctx>, - /// The cargo configuration. - pub config: &'cfg Config, + /// The cargo context. + pub gctx: &'gctx GlobalContext, /// This contains a collection of compiler flags presets. 
pub profiles: Profiles, @@ -62,10 +62,10 @@ pub struct BuildContext<'a, 'cfg> { /// Package downloader. /// /// This holds ownership of the `Package` objects. - pub packages: PackageSet<'cfg>, + pub packages: PackageSet<'gctx>, /// Information about rustc and the target platform. - pub target_data: RustcTargetData<'cfg>, + pub target_data: RustcTargetData<'gctx>, /// The root units of `unit_graph` (units requested on the command-line). pub roots: Vec, @@ -80,18 +80,18 @@ pub struct BuildContext<'a, 'cfg> { pub all_kinds: HashSet, } -impl<'a, 'cfg> BuildContext<'a, 'cfg> { +impl<'a, 'gctx> BuildContext<'a, 'gctx> { pub fn new( - ws: &'a Workspace<'cfg>, - packages: PackageSet<'cfg>, + ws: &'a Workspace<'gctx>, + packages: PackageSet<'gctx>, build_config: &'a BuildConfig, profiles: Profiles, extra_compiler_args: HashMap>, - target_data: RustcTargetData<'cfg>, + target_data: RustcTargetData<'gctx>, roots: Vec, unit_graph: UnitGraph, scrape_units: Vec, - ) -> CargoResult> { + ) -> CargoResult> { let all_kinds = unit_graph .keys() .map(|u| u.kind) @@ -101,7 +101,7 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { Ok(BuildContext { ws, - config: ws.config(), + gctx: ws.gctx(), packages, build_config, profiles, diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index c3b3dd48a11..864196e1ca1 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -9,10 +9,10 @@ use crate::core::compiler::apply_env_config; use crate::core::compiler::{ - BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType, + BuildOutput, BuildRunner, CompileKind, CompileMode, CompileTarget, CrateType, }; use crate::core::{Dependency, Package, Target, TargetKind, Workspace}; -use crate::util::config::{Config, StringList, TargetConfig}; +use crate::util::config::{GlobalContext, StringList, TargetConfig}; use crate::util::interning::InternedString; use 
crate::util::{CargoResult, Rustc}; use anyhow::Context as _; @@ -151,19 +151,13 @@ impl TargetInfo { /// /// Search `Tricky` to learn why querying `rustc` several times is needed. pub fn new( - config: &Config, + gctx: &GlobalContext, requested_kinds: &[CompileKind], rustc: &Rustc, kind: CompileKind, ) -> CargoResult { - let mut rustflags = extra_args( - config, - requested_kinds, - &rustc.host, - None, - kind, - Flags::Rust, - )?; + let mut rustflags = + extra_args(gctx, requested_kinds, &rustc.host, None, kind, Flags::Rust)?; let mut turn = 0; loop { let extra_fingerprint = kind.fingerprint_hash(); @@ -176,7 +170,7 @@ impl TargetInfo { // // Search `--print` to see what we query so far. let mut process = rustc.workspace_process(); - apply_env_config(config, &mut process)?; + apply_env_config(gctx, &mut process)?; process .arg("-") .arg("--crate-name") @@ -187,7 +181,7 @@ impl TargetInfo { // Removes `FD_CLOEXEC` set by `jobserver::Client` to pass jobserver // as environment variables specify. - if let Some(client) = config.jobserver_from_env() { + if let Some(client) = gctx.jobserver_from_env() { process.inherit_jobserver(client); } @@ -278,7 +272,7 @@ impl TargetInfo { // recalculate `rustflags` from above now that we have `cfg` // information let new_flags = extra_args( - config, + gctx, requested_kinds, &rustc.host, Some(&cfg), @@ -302,7 +296,7 @@ impl TargetInfo { continue; } if !reached_fixed_point { - config.shell().warn("non-trivial mutual dependency between target-specific configuration and RUSTFLAGS")?; + gctx.shell().warn("non-trivial mutual dependency between target-specific configuration and RUSTFLAGS")?; } return Ok(TargetInfo { @@ -313,7 +307,7 @@ impl TargetInfo { sysroot_target_libdir, rustflags, rustdocflags: extra_args( - config, + gctx, requested_kinds, &rustc.host, Some(&cfg), @@ -705,14 +699,14 @@ impl Flags { /// sources, _regardless of the value of `target-applies-to-host`_. 
This is counterintuitive, but /// necessary to retain backwards compatibility with older versions of Cargo. fn extra_args( - config: &Config, + gctx: &GlobalContext, requested_kinds: &[CompileKind], host_triple: &str, target_cfg: Option<&[Cfg]>, kind: CompileKind, flags: Flags, ) -> CargoResult> { - let target_applies_to_host = config.target_applies_to_host()?; + let target_applies_to_host = gctx.target_applies_to_host()?; // Host artifacts should not generally pick up rustflags from anywhere except [host]. // @@ -728,7 +722,7 @@ fn extra_args( // --target. Or, phrased differently, no `--target` behaves the same as `--target // `, and host artifacts are always "special" (they don't pick up `RUSTFLAGS` for // example). - return Ok(rustflags_from_host(config, flags, host_triple)?.unwrap_or_else(Vec::new)); + return Ok(rustflags_from_host(gctx, flags, host_triple)?.unwrap_or_else(Vec::new)); } } @@ -736,13 +730,13 @@ fn extra_args( // NOTE: It is impossible to have a [host] section and reach this logic with kind.is_host(), // since [host] implies `target-applies-to-host = false`, which always early-returns above. - if let Some(rustflags) = rustflags_from_env(config, flags) { + if let Some(rustflags) = rustflags_from_env(gctx, flags) { Ok(rustflags) } else if let Some(rustflags) = - rustflags_from_target(config, host_triple, target_cfg, kind, flags)? + rustflags_from_target(gctx, host_triple, target_cfg, kind, flags)? { Ok(rustflags) - } else if let Some(rustflags) = rustflags_from_build(config, flags)? { + } else if let Some(rustflags) = rustflags_from_build(gctx, flags)? { Ok(rustflags) } else { Ok(Vec::new()) @@ -751,10 +745,10 @@ fn extra_args( /// Gets compiler flags from environment variables. /// See [`extra_args`] for more. -fn rustflags_from_env(config: &Config, flags: Flags) -> Option> { +fn rustflags_from_env(gctx: &GlobalContext, flags: Flags) -> Option> { // First try CARGO_ENCODED_RUSTFLAGS from the environment. 
// Prefer this over RUSTFLAGS since it's less prone to encoding errors. - if let Ok(a) = config.get_env(format!("CARGO_ENCODED_{}", flags.as_env())) { + if let Ok(a) = gctx.get_env(format!("CARGO_ENCODED_{}", flags.as_env())) { if a.is_empty() { return Some(Vec::new()); } @@ -762,7 +756,7 @@ fn rustflags_from_env(config: &Config, flags: Flags) -> Option> { } // Then try RUSTFLAGS from the environment - if let Ok(a) = config.get_env(flags.as_env()) { + if let Ok(a) = gctx.get_env(flags.as_env()) { let args = a .split(' ') .map(str::trim) @@ -778,7 +772,7 @@ fn rustflags_from_env(config: &Config, flags: Flags) -> Option> { /// Gets compiler flags from `[target]` section in the config. /// See [`extra_args`] for more. fn rustflags_from_target( - config: &Config, + gctx: &GlobalContext, host_triple: &str, target_cfg: Option<&[Cfg]>, kind: CompileKind, @@ -792,13 +786,12 @@ fn rustflags_from_target( CompileKind::Target(target) => target.short_name(), }; let key = format!("target.{}.{}", target, flag.as_key()); - if let Some(args) = config.get::>(&key)? { + if let Some(args) = gctx.get::>(&key)? { rustflags.extend(args.as_slice().iter().cloned()); } // ...including target.'cfg(...)'.rustflags if let Some(target_cfg) = target_cfg { - config - .target_cfgs()? + gctx.target_cfgs()? .iter() .filter_map(|(key, cfg)| { match flag { @@ -827,11 +820,11 @@ fn rustflags_from_target( /// Gets compiler flags from `[host]` section in the config. /// See [`extra_args`] for more. fn rustflags_from_host( - config: &Config, + gctx: &GlobalContext, flag: Flags, host_triple: &str, ) -> CargoResult>> { - let target_cfg = config.host_cfg_triple(host_triple)?; + let target_cfg = gctx.host_cfg_triple(host_triple)?; let list = match flag { Flags::Rust => &target_cfg.rustflags, Flags::Rustdoc => { @@ -844,9 +837,9 @@ fn rustflags_from_host( /// Gets compiler flags from `[build]` section in the config. /// See [`extra_args`] for more. 
-fn rustflags_from_build(config: &Config, flag: Flags) -> CargoResult>> { +fn rustflags_from_build(gctx: &GlobalContext, flag: Flags) -> CargoResult>> { // Then the `build.rustflags` value. - let build = config.build_config()?; + let build = gctx.build_config()?; let list = match flag { Flags::Rust => &build.rustflags, Flags::Rustdoc => &build.rustdocflags, @@ -855,12 +848,12 @@ fn rustflags_from_build(config: &Config, flag: Flags) -> CargoResult { +pub struct RustcTargetData<'gctx> { /// Information about `rustc` itself. pub rustc: Rustc, /// Config - pub config: &'cfg Config, + pub gctx: &'gctx GlobalContext, requested_kinds: Vec, /// Build information for the "host", which is information about when @@ -876,21 +869,21 @@ pub struct RustcTargetData<'cfg> { target_info: HashMap, } -impl<'cfg> RustcTargetData<'cfg> { +impl<'gctx> RustcTargetData<'gctx> { pub fn new( - ws: &Workspace<'cfg>, + ws: &Workspace<'gctx>, requested_kinds: &[CompileKind], - ) -> CargoResult> { - let config = ws.config(); - let rustc = config.load_global_rustc(Some(ws))?; + ) -> CargoResult> { + let gctx = ws.gctx(); + let rustc = gctx.load_global_rustc(Some(ws))?; let mut target_config = HashMap::new(); let mut target_info = HashMap::new(); - let target_applies_to_host = config.target_applies_to_host()?; - let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?; + let target_applies_to_host = gctx.target_applies_to_host()?; + let host_info = TargetInfo::new(gctx, requested_kinds, &rustc, CompileKind::Host)?; let host_config = if target_applies_to_host { - config.target_cfg_triple(&rustc.host)? + gctx.target_cfg_triple(&rustc.host)? } else { - config.host_cfg_triple(&rustc.host)? + gctx.host_cfg_triple(&rustc.host)? }; // This is a hack. 
The unit_dependency graph builder "pretends" that @@ -901,12 +894,12 @@ impl<'cfg> RustcTargetData<'cfg> { if requested_kinds.iter().any(CompileKind::is_host) { let ct = CompileTarget::new(&rustc.host)?; target_info.insert(ct, host_info.clone()); - target_config.insert(ct, config.target_cfg_triple(&rustc.host)?); + target_config.insert(ct, gctx.target_cfg_triple(&rustc.host)?); }; let mut res = RustcTargetData { rustc, - config, + gctx, requested_kinds: requested_kinds.into(), host_config, host_info, @@ -950,12 +943,12 @@ impl<'cfg> RustcTargetData<'cfg> { if let CompileKind::Target(target) = kind { if !self.target_config.contains_key(&target) { self.target_config - .insert(target, self.config.target_cfg_triple(target.short_name())?); + .insert(target, self.gctx.target_cfg_triple(target.short_name())?); } if !self.target_info.contains_key(&target) { self.target_info.insert( target, - TargetInfo::new(self.config, &self.requested_kinds, &self.rustc, kind)?, + TargetInfo::new(self.gctx, &self.requested_kinds, &self.rustc, kind)?, ); } } @@ -1040,15 +1033,23 @@ impl RustDocFingerprint { /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have /// any versioning. 
- pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> { - if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint { + pub fn check_rustdoc_fingerprint(build_runner: &BuildRunner<'_, '_>) -> CargoResult<()> { + if build_runner + .bcx + .gctx + .cli_unstable() + .skip_rustdoc_fingerprint + { return Ok(()); } let actual_rustdoc_target_data = RustDocFingerprint { - rustc_vv: cx.bcx.rustc().verbose_version.clone(), + rustc_vv: build_runner.bcx.rustc().verbose_version.clone(), }; - let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); + let fingerprint_path = build_runner + .files() + .host_root() + .join(".rustdoc_fingerprint.json"); let write_fingerprint = || -> CargoResult<()> { paths::write( &fingerprint_path, @@ -1083,10 +1084,11 @@ impl RustDocFingerprint { "fingerprint {:?} mismatch, clearing doc directories", fingerprint_path ); - cx.bcx + build_runner + .bcx .all_kinds .iter() - .map(|kind| cx.files().layout(*kind).doc()) + .map(|kind| build_runner.files().layout(*kind).doc()) .filter(|path| path.exists()) .try_for_each(|path| clean_doc(path))?; write_fingerprint()?; diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs index a024d49908a..d116c07b5be 100644 --- a/src/cargo/core/compiler/build_plan.rs +++ b/src/cargo/core/compiler/build_plan.rs @@ -11,10 +11,10 @@ use std::path::{Path, PathBuf}; use serde::Serialize; -use super::context::OutputFile; -use super::{CompileKind, CompileMode, Context, Unit}; +use super::build_runner::OutputFile; +use super::{BuildRunner, CompileKind, CompileMode, Unit}; use crate::core::TargetKind; -use crate::util::{internal, CargoResult, Config}; +use crate::util::{internal, CargoResult, GlobalContext}; use cargo_util::ProcessBuilder; #[derive(Debug, Serialize)] @@ -107,10 +107,10 @@ impl BuildPlan { } } - pub fn add(&mut self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { + pub fn add(&mut self, build_runner: &BuildRunner<'_, '_>, unit: 
&Unit) -> CargoResult<()> { let id = self.plan.invocations.len(); self.invocation_map.insert(unit.buildkey(), id); - let deps = cx + let deps = build_runner .unit_deps(unit) .iter() .map(|dep| self.invocation_map[&dep.unit.buildkey()]) @@ -144,9 +144,9 @@ impl BuildPlan { self.plan.inputs = inputs; } - pub fn output_plan(self, config: &Config) { + pub fn output_plan(self, gctx: &GlobalContext) { let encoded = serde_json::to_string(&self.plan).unwrap(); - crate::drop_println!(config, "{}", encoded); + crate::drop_println!(gctx, "{}", encoded); } } diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/build_runner/compilation_files.rs similarity index 94% rename from src/cargo/core/compiler/context/compilation_files.rs rename to src/cargo/core/compiler/build_runner/compilation_files.rs index 825044a98a2..27c555a2694 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/build_runner/compilation_files.rs @@ -9,7 +9,7 @@ use std::sync::Arc; use lazycell::LazyCell; use tracing::debug; -use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; +use super::{BuildContext, BuildRunner, CompileKind, FileFlavor, Layout}; use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; use crate::util::{self, CargoResult, StableHasher}; @@ -96,7 +96,7 @@ struct MetaInfo { /// Collection of information about the files emitted by the compiler, and the /// output directory structure. -pub struct CompilationFiles<'a, 'cfg> { +pub struct CompilationFiles<'a, 'gctx> { /// The target directory layout for the host (and target if it is the same as host). pub(super) host: Layout, /// The target directory layout for the target (if different from then host). @@ -106,7 +106,7 @@ pub struct CompilationFiles<'a, 'cfg> { /// The root targets requested by the user on the command line (does not /// include dependencies). 
roots: Vec, - ws: &'a Workspace<'cfg>, + ws: &'a Workspace<'gctx>, /// Metadata hash to use for each unit. metas: HashMap, /// For each Unit, a list all files produced. @@ -137,15 +137,15 @@ impl OutputFile { } } -impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { +impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> { pub(super) fn new( - cx: &Context<'a, 'cfg>, + build_runner: &BuildRunner<'a, 'gctx>, host: Layout, target: HashMap, - ) -> CompilationFiles<'a, 'cfg> { + ) -> CompilationFiles<'a, 'gctx> { let mut metas = HashMap::new(); - for unit in &cx.bcx.roots { - metadata_of(unit, cx, &mut metas); + for unit in &build_runner.bcx.roots { + metadata_of(unit, build_runner, &mut metas); } let outputs = metas .keys() @@ -153,11 +153,11 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { .map(|unit| (unit, LazyCell::new())) .collect(); CompilationFiles { - ws: cx.bcx.ws, + ws: build_runner.bcx.ws, host, target, - export_dir: cx.bcx.build_config.export_dir.clone(), - roots: cx.bcx.roots.clone(), + export_dir: build_runner.bcx.build_config.export_dir.clone(), + roots: build_runner.bcx.roots.clone(), metas, outputs, } @@ -368,7 +368,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { pub(super) fn outputs( &self, unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, + bcx: &BuildContext<'a, 'gctx>, ) -> CargoResult>> { self.outputs[unit] .try_borrow_with(|| self.calc_outputs(unit, bcx)) @@ -432,7 +432,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { fn calc_outputs( &self, unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, + bcx: &BuildContext<'a, 'gctx>, ) -> CargoResult>> { let ret = match unit.mode { CompileMode::Doc { json, .. 
} => { @@ -493,7 +493,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { fn calc_outputs_rustc( &self, unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, + bcx: &BuildContext<'a, 'gctx>, ) -> CargoResult> { let out_dir = self.out_dir(unit); @@ -557,14 +557,14 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { /// See [`compute_metadata`] for how a single metadata hash is computed. fn metadata_of<'a>( unit: &Unit, - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, metas: &'a mut HashMap, ) -> &'a MetaInfo { if !metas.contains_key(unit) { - let meta = compute_metadata(unit, cx, metas); + let meta = compute_metadata(unit, build_runner, metas); metas.insert(unit.clone(), meta); - for dep in cx.unit_deps(unit) { - metadata_of(&dep.unit, cx, metas); + for dep in build_runner.unit_deps(unit) { + metadata_of(&dep.unit, build_runner, metas); } } &metas[unit] @@ -573,10 +573,10 @@ fn metadata_of<'a>( /// Computes the metadata hash for the given [`Unit`]. fn compute_metadata( unit: &Unit, - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, metas: &mut HashMap, ) -> MetaInfo { - let bcx = &cx.bcx; + let bcx = &build_runner.bcx; let mut hasher = StableHasher::new(); METADATA_VERSION.hash(&mut hasher); @@ -593,10 +593,10 @@ fn compute_metadata( unit.features.hash(&mut hasher); // Mix in the target-metadata of all the dependencies of this target. - let mut deps_metadata = cx + let mut deps_metadata = build_runner .unit_deps(unit) .iter() - .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash) + .map(|dep| metadata_of(&dep.unit, build_runner, metas).meta_hash) .collect::>(); deps_metadata.sort(); deps_metadata.hash(&mut hasher); @@ -606,7 +606,7 @@ fn compute_metadata( // settings like debuginfo and whatnot. 
unit.profile.hash(&mut hasher); unit.mode.hash(&mut hasher); - cx.lto[unit].hash(&mut hasher); + build_runner.lto[unit].hash(&mut hasher); // Artifacts compiled for the host should have a different // metadata piece than those compiled for the target, so make sure @@ -622,17 +622,21 @@ fn compute_metadata( hash_rustc_version(bcx, &mut hasher); - if cx.bcx.ws.is_member(&unit.pkg) { + if build_runner.bcx.ws.is_member(&unit.pkg) { // This is primarily here for clippy. This ensures that the clippy // artifacts are separate from the `check` ones. - if let Some(path) = &cx.bcx.rustc().workspace_wrapper { + if let Some(path) = &build_runner.bcx.rustc().workspace_wrapper { path.hash(&mut hasher); } } // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. // This should be the release channel, to get a different hash for each channel. - if let Ok(ref channel) = cx.bcx.config.get_env("__CARGO_DEFAULT_LIB_METADATA") { + if let Ok(ref channel) = build_runner + .bcx + .gctx + .get_env("__CARGO_DEFAULT_LIB_METADATA") + { channel.hash(&mut hasher); } @@ -654,7 +658,7 @@ fn compute_metadata( /// Hash the version of rustc being used during the build process. fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { let vers = &bcx.rustc().version; - if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { + if vers.pre.is_empty() || bcx.gctx.cli_unstable().separate_nightlies { // For stable, keep the artifacts separate. This helps if someone is // testing multiple versions, to avoid recompiles. 
bcx.rustc().verbose_version.hash(hasher); @@ -721,7 +725,7 @@ fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { || (unit.target.is_executable() && short_name == "wasm32-unknown-emscripten") || (unit.target.is_executable() && short_name.contains("msvc"))) && unit.pkg.package_id().source_id().is_path() - && bcx.config.get_env("__CARGO_DEFAULT_LIB_METADATA").is_err() + && bcx.gctx.get_env("__CARGO_DEFAULT_LIB_METADATA").is_err() { return false; } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/build_runner/mod.rs similarity index 97% rename from src/cargo/core/compiler/context/mod.rs rename to src/cargo/core/compiler/build_runner/mod.rs index 3aedf515cc7..1be9769ac29 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/build_runner/mod.rs @@ -1,4 +1,4 @@ -//! [`Context`] is the mutable state used during the build process. +//! [`BuildRunner`] is the mutable state used during the build process. use std::collections::{BTreeSet, HashMap, HashSet}; use std::path::{Path, PathBuf}; @@ -36,11 +36,11 @@ pub use self::compilation_files::{Metadata, OutputFile}; /// throughout the entire build process. Everything is coordinated through this. /// /// [`BuildContext`]: crate::core::compiler::BuildContext -pub struct Context<'a, 'cfg> { +pub struct BuildRunner<'a, 'gctx> { /// Mostly static information about the build task. - pub bcx: &'a BuildContext<'a, 'cfg>, + pub bcx: &'a BuildContext<'a, 'gctx>, /// A large collection of information about the result of the entire compilation. - pub compilation: Compilation<'cfg>, + pub compilation: Compilation<'gctx>, /// Output from build scripts, updated after each build script runs. pub build_script_outputs: Arc>, /// Dependencies (like rerun-if-changed) declared by a build script. @@ -67,7 +67,7 @@ pub struct Context<'a, 'cfg> { /// An abstraction of the files and directories that will be generated by /// the compilation. 
This is `None` until after `unit_dependencies` has /// been computed. - files: Option>, + files: Option>, /// A set of units which are compiling rlibs and are expected to produce /// metadata files in addition to the rlib itself. @@ -88,8 +88,8 @@ pub struct Context<'a, 'cfg> { pub failed_scrape_units: Arc>>, } -impl<'a, 'cfg> Context<'a, 'cfg> { - pub fn new(bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult { +impl<'a, 'gctx> BuildRunner<'a, 'gctx> { + pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult { // Load up the jobserver that we'll use to manage our parallelism. This // is the same as the GNU make implementation of a jobserver, and // intentionally so! It's hoped that we can interact with GNU make and @@ -98,7 +98,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // Note that if we don't have a jobserver in our environment then we // create our own, and we create it with `n` tokens, but immediately // acquire one, because one token is ourself, a running process. - let jobserver = match bcx.config.jobserver_from_env() { + let jobserver = match bcx.gctx.jobserver_from_env() { Some(c) => c.clone(), None => { let client = Client::new(bcx.jobs() as usize) @@ -133,13 +133,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// See [`ops::cargo_compile`] for a higher-level view of the compile process. /// /// [`ops::cargo_compile`]: ../../../ops/cargo_compile/index.html - pub fn compile(mut self, exec: &Arc) -> CargoResult> { + pub fn compile(mut self, exec: &Arc) -> CargoResult> { // A shared lock is held during the duration of the build since rustc // needs to read from the `src` cache, and we don't want other // commands modifying the `src` cache while it is running. 
let _lock = self .bcx - .config + .gctx .acquire_package_cache_lock(CacheLockMode::Shared)?; let mut queue = JobQueue::new(self.bcx); let mut plan = BuildPlan::new(); @@ -183,7 +183,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { if build_plan { plan.set_inputs(self.build_plan_inputs()?); - plan.output_plan(self.bcx.config); + plan.output_plan(self.bcx.gctx); } // Add `OUT_DIR` to env vars if unit has a build script. @@ -377,7 +377,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { Ok(()) } - pub fn files(&self) -> &CompilationFiles<'a, 'cfg> { + pub fn files(&self) -> &CompilationFiles<'a, 'gctx> { self.files.as_ref().unwrap() } @@ -480,7 +480,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { suggestion: &str| -> CargoResult<()> { if unit.target.name() == other_unit.target.name() { - self.bcx.config.shell().warn(format!( + self.bcx.gctx.shell().warn(format!( "output filename collision.\n\ {}\ The targets should have unique names.\n\ @@ -489,7 +489,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { suggestion )) } else { - self.bcx.config.shell().warn(format!( + self.bcx.gctx.shell().warn(format!( "output filename collision.\n\ {}\ The output filenames should be unique.\n\ @@ -575,7 +575,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } if let Some(ref export_path) = output.export_path { if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) { - self.bcx.config.shell().warn(format!( + self.bcx.gctx.shell().warn(format!( "`--out-dir` filename collision.\n\ {}\ The exported filenames should be unique.\n\ diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index 8744221b2d7..f6ddf34b07d 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -11,7 +11,7 @@ use crate::core::compiler::apply_env_config; use crate::core::compiler::BuildContext; use crate::core::compiler::{CompileKind, Metadata, Unit}; use crate::core::Package; -use crate::util::{config, CargoResult, Config}; +use crate::util::{config, 
CargoResult, GlobalContext}; /// Structure with enough information to run `rustdoc --test`. pub struct Doctest { @@ -46,7 +46,7 @@ pub struct UnitOutput { } /// A structure returning the result of a compilation. -pub struct Compilation<'cfg> { +pub struct Compilation<'gctx> { /// An array of all tests created during this compilation. pub tests: Vec, @@ -93,7 +93,7 @@ pub struct Compilation<'cfg> { /// The target host triple. pub host: String, - config: &'cfg Config, + gctx: &'gctx GlobalContext, /// Rustc process to be used by default rustc_process: ProcessBuilder, @@ -108,13 +108,13 @@ pub struct Compilation<'cfg> { target_linkers: HashMap>, } -impl<'cfg> Compilation<'cfg> { - pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult> { +impl<'gctx> Compilation<'gctx> { + pub fn new<'a>(bcx: &BuildContext<'a, 'gctx>) -> CargoResult> { let mut rustc = bcx.rustc().process(); let mut primary_rustc_process = bcx.build_config.primary_unit_rustc.clone(); let mut rustc_workspace_wrapper_process = bcx.rustc().workspace_process(); - if bcx.config.extra_verbose() { + if bcx.gctx.extra_verbose() { rustc.display_env_vars(); rustc_workspace_wrapper_process.display_env_vars(); @@ -140,7 +140,7 @@ impl<'cfg> Compilation<'cfg> { root_crate_names: Vec::new(), extra_env: HashMap::new(), to_doc_test: Vec::new(), - config: bcx.config, + gctx: bcx.gctx, host: bcx.host_triple().to_string(), rustc_process: rustc, rustc_workspace_wrapper_process, @@ -166,7 +166,7 @@ impl<'cfg> Compilation<'cfg> { /// /// `is_primary` is true if this is a "primary package", which means it /// was selected by the user on the command-line (such as with a `-p` - /// flag), see [`crate::core::compiler::Context::primary_packages`]. + /// flag), see [`crate::core::compiler::BuildRunner::primary_packages`]. /// /// `is_workspace` is true if this is a workspace member. 
pub fn rustc_process( @@ -193,7 +193,7 @@ impl<'cfg> Compilation<'cfg> { unit: &Unit, script_meta: Option, ) -> CargoResult { - let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?); + let rustdoc = ProcessBuilder::new(&*self.gctx.rustdoc()?); let cmd = fill_rustc_tool_env(rustdoc, unit); let mut cmd = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?; cmd.retry_with_argfile(true); @@ -259,7 +259,7 @@ impl<'cfg> Compilation<'cfg> { }; let mut builder = self.fill_env(builder, pkg, script_meta, kind, false)?; - if let Some(client) = self.config.jobserver_from_env() { + if let Some(client) = self.gctx.jobserver_from_env() { builder.inherit_jobserver(client); } @@ -294,7 +294,7 @@ impl<'cfg> Compilation<'cfg> { // libs from the sysroot that ships with rustc. This may not be // required (at least I cannot craft a situation where it // matters), but is here to be safe. - if self.config.cli_unstable().build_std.is_none() { + if self.gctx.cli_unstable().build_std.is_none() { search_path.push(self.sysroot_target_libdir[&kind].clone()); } } @@ -306,7 +306,7 @@ impl<'cfg> Compilation<'cfg> { // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't // set or set to an empty string. Since Cargo is explicitly setting // the value, make sure the defaults still work. 
- if let Some(home) = self.config.get_env_os("HOME") { + if let Some(home) = self.gctx.get_env_os("HOME") { search_path.push(PathBuf::from(home).join("lib")); } search_path.push(PathBuf::from("/usr/local/lib")); @@ -325,7 +325,7 @@ impl<'cfg> Compilation<'cfg> { let metadata = pkg.manifest().metadata(); - let cargo_exe = self.config.cargo_exe()?; + let cargo_exe = self.gctx.cargo_exe()?; cmd.env(crate::CARGO_ENV, cargo_exe); // When adding new environment variables depending on @@ -371,7 +371,7 @@ impl<'cfg> Compilation<'cfg> { ) .cwd(pkg.root()); - apply_env_config(self.config, &mut cmd)?; + apply_env_config(self.gctx, &mut cmd)?; Ok(cmd) } @@ -430,15 +430,15 @@ fn target_runner( // try target.{}.runner let key = format!("target.{}.runner", target); - if let Some(v) = bcx.config.get::>(&key)? { - let path = v.path.resolve_program(bcx.config); + if let Some(v) = bcx.gctx.get::>(&key)? { + let path = v.path.resolve_program(bcx.gctx); return Ok(Some((path, v.args))); } // try target.'cfg(...)'.runner let target_cfg = bcx.target_data.info(kind).cfg(); let mut cfgs = bcx - .config + .gctx .target_cfgs()? 
.iter() .filter_map(|(key, cfg)| cfg.runner.as_ref().map(|runner| (key, runner))) @@ -457,7 +457,7 @@ fn target_runner( } Ok(matching_runner.map(|(_k, runner)| { ( - runner.val.path.clone().resolve_program(bcx.config), + runner.val.path.clone().resolve_program(bcx.gctx), runner.val.args.clone(), ) })) @@ -471,7 +471,7 @@ fn target_linker(bcx: &BuildContext<'_, '_>, kind: CompileKind) -> CargoResult, kind: CompileKind) -> CargoResult, kind: CompileKind) -> CargoResult CargoResult> { let dedup = |targets: &[String]| { @@ -70,9 +70,9 @@ impl CompileKind { return dedup(targets); } - let kinds = match &config.build_config()?.target { + let kinds = match &gctx.build_config()?.target { None => Ok(vec![CompileKind::Host]), - Some(build_target_config) => dedup(&build_target_config.values(config)?), + Some(build_target_config) => dedup(&build_target_config.values(gctx)?), }; kinds diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index bd88e8e8f8f..065f012b94f 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -22,7 +22,7 @@ //! of a build script. Standard output is the chosen interprocess communication //! between Cargo and build script processes. A set of strings is defined for //! that purpose. These strings, a.k.a. instructions, are interpreted by -//! [`BuildOutput::parse`] and stored in [`Context::build_script_outputs`]. +//! [`BuildOutput::parse`] and stored in [`BuildRunner::build_script_outputs`]. //! The entire execution work is constructed by [`build_work`]. //! //! [build script]: https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html @@ -31,9 +31,9 @@ //! [`CompileMode::RunCustomBuild`]: super::CompileMode //! 
[instructions]: https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script -use super::{fingerprint, Context, Job, Unit, Work}; +use super::{fingerprint, BuildRunner, Job, Unit, Work}; use crate::core::compiler::artifact; -use crate::core::compiler::context::Metadata; +use crate::core::compiler::build_runner::Metadata; use crate::core::compiler::fingerprint::DirtyReason; use crate::core::compiler::job_queue::JobState; use crate::core::{profiles::ProfileRoot, PackageId, Target}; @@ -194,24 +194,24 @@ impl LinkArgTarget { } /// Prepares a `Work` that executes the target as a custom build script. -pub fn prepare(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { +pub fn prepare(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { let _p = profile::start(format!( "build script prepare: {}/{}", unit.pkg, unit.target.name() )); - let metadata = cx.get_run_build_script_metadata(unit); - if cx + let metadata = build_runner.get_run_build_script_metadata(unit); + if build_runner .build_script_outputs .lock() .unwrap() .contains_key(metadata) { // The output is already set, thus the build script is overridden. - fingerprint::prepare_target(cx, unit, false) + fingerprint::prepare_target(build_runner, unit, false) } else { - build_work(cx, unit) + build_work(build_runner, unit) } } @@ -250,23 +250,23 @@ fn emit_build_output( /// * Create the output dir (`OUT_DIR`) for the build script output. /// * Determine if the build script needs a re-run. /// * Run the build script and store its output. 
-fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { +fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { assert!(unit.mode.is_run_custom_build()); - let bcx = &cx.bcx; - let dependencies = cx.unit_deps(unit); + let bcx = &build_runner.bcx; + let dependencies = build_runner.unit_deps(unit); let build_script_unit = dependencies .iter() .find(|d| !d.unit.mode.is_run_custom_build() && d.unit.target.is_custom_build()) .map(|d| &d.unit) .expect("running a script not depending on an actual script"); - let script_dir = cx.files().build_script_dir(build_script_unit); - let script_out_dir = cx.files().build_script_out_dir(unit); - let script_run_dir = cx.files().build_script_run_dir(unit); + let script_dir = build_runner.files().build_script_dir(build_script_unit); + let script_out_dir = build_runner.files().build_script_out_dir(unit); + let script_run_dir = build_runner.files().build_script_run_dir(unit); let build_plan = bcx.build_config.build_plan; let invocation_name = unit.buildkey(); if let Some(deps) = unit.pkg.manifest().metabuild() { - prepare_metabuild(cx, build_script_unit, deps)?; + prepare_metabuild(build_runner, build_script_unit, deps)?; } // Building the command to execute @@ -280,7 +280,7 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { // `Profiles::get_profile_run_custom_build` so that those flags get // carried over. let to_exec = to_exec.into_os_string(); - let mut cmd = cx.compilation.host_process(to_exec, &unit.pkg)?; + let mut cmd = build_runner.compilation.host_process(to_exec, &unit.pkg)?; let debug = unit.profile.debuginfo.is_turned_on(); cmd.env("OUT_DIR", &script_out_dir) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) @@ -297,15 +297,15 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { ) .env("HOST", &bcx.host_triple()) .env("RUSTC", &bcx.rustc().path) - .env("RUSTDOC", &*bcx.config.rustdoc()?) 
- .inherit_jobserver(&cx.jobserver); + .env("RUSTDOC", &*bcx.gctx.rustdoc()?) + .inherit_jobserver(&build_runner.jobserver); // Find all artifact dependencies and make their file and containing directory discoverable using environment variables. - for (var, value) in artifact::get_env(cx, dependencies)? { + for (var, value) in artifact::get_env(build_runner, dependencies)? { cmd.env(&var, value); } - if let Some(linker) = &cx.compilation.target_linker(unit.kind) { + if let Some(linker) = &build_runner.compilation.target_linker(unit.kind) { cmd.env("RUSTC_LINKER", linker); } @@ -352,7 +352,7 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { cmd.env_remove("RUSTC_WRAPPER"); } cmd.env_remove("RUSTC_WORKSPACE_WRAPPER"); - if cx.bcx.ws.is_member(&unit.pkg) { + if build_runner.bcx.ws.is_member(&unit.pkg) { if let Some(wrapper) = bcx.rustc().workspace_wrapper.as_ref() { cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper); } @@ -363,7 +363,7 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { ); cmd.env_remove("RUSTFLAGS"); - if cx.bcx.ws.config().extra_verbose() { + if build_runner.bcx.ws.gctx().extra_verbose() { cmd.display_env_vars(); } @@ -376,7 +376,7 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { .iter() .filter_map(|dep| { if dep.unit.mode.is_run_custom_build() { - let dep_metadata = cx.get_run_build_script_metadata(&dep.unit); + let dep_metadata = build_runner.get_run_build_script_metadata(&dep.unit); Some(( dep.unit.pkg.manifest().links().unwrap().to_string(), dep.unit.pkg.package_id(), @@ -389,12 +389,12 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { .collect::>(); let library_name = unit.pkg.library().map(|t| t.crate_name()); let pkg_descr = unit.pkg.to_string(); - let build_script_outputs = Arc::clone(&cx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let id = unit.pkg.package_id(); let output_file = 
script_run_dir.join("output"); let err_file = script_run_dir.join("stderr"); let root_output_file = script_run_dir.join("root-output"); - let host_target_root = cx.files().host_dest().to_path_buf(); + let host_target_root = build_runner.files().host_dest().to_path_buf(); let all = ( id, library_name.clone(), @@ -403,17 +403,17 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { output_file.clone(), script_out_dir.clone(), ); - let build_scripts = cx.build_scripts.get(unit).cloned(); + let build_scripts = build_runner.build_scripts.get(unit).cloned(); let json_messages = bcx.build_config.emit_json(); - let extra_verbose = bcx.config.extra_verbose(); - let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit); - let metadata_hash = cx.get_run_build_script_metadata(unit); + let extra_verbose = bcx.gctx.extra_verbose(); + let (prev_output, prev_script_out_dir) = prev_build_output(build_runner, unit); + let metadata_hash = build_runner.get_run_build_script_metadata(unit); paths::create_dir_all(&script_dir)?; paths::create_dir_all(&script_out_dir)?; - let nightly_features_allowed = cx.bcx.config.nightly_features_allowed; - let extra_check_cfg = cx.bcx.config.cli_unstable().check_cfg; + let nightly_features_allowed = build_runner.bcx.gctx.nightly_features_allowed; + let extra_check_cfg = build_runner.bcx.gctx.cli_unstable().check_cfg; let targets: Vec = unit.pkg.targets().to_vec(); let msrv = unit.pkg.rust_version().cloned(); // Need a separate copy for the fresh closure. 
@@ -421,7 +421,7 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { let msrv_fresh = msrv.clone(); let env_profile_name = unit.profile.name.to_uppercase(); - let built_with_debuginfo = cx + let built_with_debuginfo = build_runner .bcx .unit_graph .get(unit) @@ -608,10 +608,10 @@ fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { Ok(()) }); - let mut job = if cx.bcx.build_config.build_plan { + let mut job = if build_runner.bcx.build_config.build_plan { Job::new_dirty(Work::noop(), DirtyReason::FreshBuild) } else { - fingerprint::prepare_target(cx, unit, false)? + fingerprint::prepare_target(build_runner, unit, false)? }; if job.freshness().is_dirty() { job.before(dirty); @@ -1064,9 +1064,13 @@ impl BuildOutput { /// Prepares the Rust script for the unstable feature [metabuild]. /// /// [metabuild]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#metabuild -fn prepare_metabuild(cx: &Context<'_, '_>, unit: &Unit, deps: &[String]) -> CargoResult<()> { +fn prepare_metabuild( + build_runner: &BuildRunner<'_, '_>, + unit: &Unit, + deps: &[String], +) -> CargoResult<()> { let mut output = Vec::new(); - let available_deps = cx.unit_deps(unit); + let available_deps = build_runner.unit_deps(unit); // Filter out optional dependencies, and look up the actual lib name. let meta_deps: Vec<_> = deps .iter() @@ -1083,7 +1087,10 @@ fn prepare_metabuild(cx: &Context<'_, '_>, unit: &Unit, deps: &[String]) -> Carg } output.push("}\n".to_string()); let output = output.join(""); - let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); + let path = unit + .pkg + .manifest() + .metabuild_path(build_runner.bcx.ws.target_dir()); paths::create_dir_all(path.parent().unwrap())?; paths::write_if_changed(path, &output)?; Ok(()) @@ -1107,7 +1114,7 @@ impl BuildDeps { } } -/// Computes several maps in [`Context`]. +/// Computes several maps in [`BuildRunner`]. 
/// /// - [`build_scripts`]: A map that tracks which build scripts each package /// depends on. @@ -1125,15 +1132,16 @@ impl BuildDeps { /// The given set of units to this function is the initial set of /// targets/profiles which are being built. /// -/// [`build_scripts`]: Context::build_scripts -/// [`build_explicit_deps`]: Context::build_explicit_deps -/// [`build_script_outputs`]: Context::build_script_outputs -pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { +/// [`build_scripts`]: BuildRunner::build_scripts +/// [`build_explicit_deps`]: BuildRunner::build_explicit_deps +/// [`build_script_outputs`]: BuildRunner::build_script_outputs +pub fn build_map(build_runner: &mut BuildRunner<'_, '_>) -> CargoResult<()> { let mut ret = HashMap::new(); - for unit in &cx.bcx.roots { - build(&mut ret, cx, unit)?; + for unit in &build_runner.bcx.roots { + build(&mut ret, build_runner, unit)?; } - cx.build_scripts + build_runner + .build_scripts .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v)))); return Ok(()); @@ -1141,7 +1149,7 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { // memoizes all of its return values as it goes along. fn build<'a>( out: &'a mut HashMap, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> CargoResult<&'a BuildScripts> { // Do a quick pre-flight check to see if we've already calculated the @@ -1153,9 +1161,13 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { // If there is a build script override, pre-fill the build output. 
if unit.mode.is_run_custom_build() { if let Some(links) = unit.pkg.manifest().links() { - if let Some(output) = cx.bcx.target_data.script_override(links, unit.kind) { - let metadata = cx.get_run_build_script_metadata(unit); - cx.build_script_outputs.lock().unwrap().insert( + if let Some(output) = build_runner + .bcx + .target_data + .script_override(links, unit.kind) + { + let metadata = build_runner.get_run_build_script_metadata(unit); + build_runner.build_script_outputs.lock().unwrap().insert( unit.pkg.package_id(), metadata, output.clone(), @@ -1168,26 +1180,29 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { // If a package has a build script, add itself as something to inspect for linking. if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { - let script_meta = cx + let script_meta = build_runner .find_build_script_metadata(unit) .expect("has_custom_build should have RunCustomBuild"); add_to_link(&mut ret, unit.pkg.package_id(), script_meta); } if unit.mode.is_run_custom_build() { - parse_previous_explicit_deps(cx, unit); + parse_previous_explicit_deps(build_runner, unit); } // We want to invoke the compiler deterministically to be cache-friendly // to rustc invocation caching schemes, so be sure to generate the same // set of build script dependency orderings via sorting the targets that // come out of the `Context`. 
- let mut dependencies: Vec = - cx.unit_deps(unit).iter().map(|d| d.unit.clone()).collect(); + let mut dependencies: Vec = build_runner + .unit_deps(unit) + .iter() + .map(|d| d.unit.clone()) + .collect(); dependencies.sort_by_key(|u| u.pkg.package_id()); for dep_unit in dependencies.iter() { - let dep_scripts = build(out, cx, dep_unit)?; + let dep_scripts = build(out, build_runner, dep_unit)?; if dep_unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter().cloned()); @@ -1213,12 +1228,12 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { } /// Load any dependency declarations from a previous build script run. - fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) { - let script_run_dir = cx.files().build_script_run_dir(unit); + fn parse_previous_explicit_deps(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) { + let script_run_dir = build_runner.files().build_script_run_dir(unit); let output_file = script_run_dir.join("output"); - let (prev_output, _) = prev_build_output(cx, unit); + let (prev_output, _) = prev_build_output(build_runner, unit); let deps = BuildDeps::new(&output_file, prev_output.as_ref()); - cx.build_explicit_deps.insert(unit.clone(), deps); + build_runner.build_explicit_deps.insert(unit.clone(), deps); } } @@ -1227,9 +1242,12 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { /// /// Also returns the directory containing the output, typically used later in /// processing. 
-fn prev_build_output(cx: &mut Context<'_, '_>, unit: &Unit) -> (Option, PathBuf) { - let script_out_dir = cx.files().build_script_out_dir(unit); - let script_run_dir = cx.files().build_script_run_dir(unit); +fn prev_build_output( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, +) -> (Option, PathBuf) { + let script_out_dir = build_runner.files().build_script_out_dir(unit); + let script_run_dir = build_runner.files().build_script_run_dir(unit); let root_output_file = script_run_dir.join("root-output"); let output_file = script_run_dir.join("output"); @@ -1244,8 +1262,8 @@ fn prev_build_output(cx: &mut Context<'_, '_>, unit: &Unit) -> (Option, unit: &Unit, force: bool) -> CargoResult { +pub fn prepare_target( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, + force: bool, +) -> CargoResult { let _p = profile::start(format!( "fingerprint: {} / {}", unit.pkg.package_id(), unit.target.name() )); - let bcx = cx.bcx; - let loc = cx.files().fingerprint_file_path(unit, ""); + let bcx = build_runner.bcx; + let loc = build_runner.files().fingerprint_file_path(unit, ""); debug!("fingerprint at: {}", loc.display()); // Figure out if this unit is up to date. After calculating the fingerprint // compare it to an old version, if any, and attempt to print diagnostic // information about failed comparisons to aid in debugging. - let fingerprint = calculate(cx, unit)?; - let mtime_on_use = cx.bcx.config.cli_unstable().mtime_on_use; + let fingerprint = calculate(build_runner, unit)?; + let mtime_on_use = build_runner.bcx.gctx.cli_unstable().mtime_on_use; let dirty_reason = compare_old_fingerprint(unit, &loc, &*fingerprint, mtime_on_use, force); let Some(dirty_reason) = dirty_reason else { @@ -481,10 +485,12 @@ pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> Car // build script's fingerprint after it's executed. 
We do this by // using the `build_script_local_fingerprints` function which returns a // thunk we can invoke on a foreign thread to calculate this. - let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let metadata = cx.get_run_build_script_metadata(unit); - let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit); - let output_path = cx.build_explicit_deps[unit].build_script_output.clone(); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); + let metadata = build_runner.get_run_build_script_metadata(unit); + let (gen_local, _overridden) = build_script_local_fingerprints(build_runner, unit); + let output_path = build_runner.build_explicit_deps[unit] + .build_script_output + .clone(); Work::new(move |_| { let outputs = build_script_outputs.lock().unwrap(); let output = outputs @@ -787,7 +793,7 @@ impl LocalFingerprint { pkg_root: &Path, target_root: &Path, cargo_exe: &Path, - config: &Config, + gctx: &GlobalContext, ) -> CargoResult> { match self { // We need to parse `dep_info`, learn about the crate's dependencies. @@ -815,7 +821,7 @@ impl LocalFingerprint { .to_string(), ) } else { - config.get_env(key).ok() + gctx.get_env(key).ok() }; if current == *previous { continue; @@ -1071,7 +1077,7 @@ impl Fingerprint { pkg_root: &Path, target_root: &Path, cargo_exe: &Path, - config: &Config, + gctx: &GlobalContext, ) -> CargoResult<()> { assert!(!self.fs_status.up_to_date()); @@ -1173,7 +1179,7 @@ impl Fingerprint { // message and bail out so we stay stale. for local in self.local.get_mut().unwrap().iter() { if let Some(item) = - local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe, config)? + local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe, gctx)? 
{ item.log(); self.fs_status = FsStatus::StaleItem(item); @@ -1241,8 +1247,12 @@ impl hash::Hash for Fingerprint { } impl DepFingerprint { - fn new(cx: &mut Context<'_, '_>, parent: &Unit, dep: &UnitDep) -> CargoResult { - let fingerprint = calculate(cx, &dep.unit)?; + fn new( + build_runner: &mut BuildRunner<'_, '_>, + parent: &Unit, + dep: &UnitDep, + ) -> CargoResult { + let fingerprint = calculate(build_runner, &dep.unit)?; // We need to be careful about what we hash here. We have a goal of // supporting renaming a project directory and not rebuilding // everything. To do that, however, we need to make sure that the cwd @@ -1264,7 +1274,7 @@ impl DepFingerprint { name: dep.extern_crate_name, public: dep.public, fingerprint, - only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit), + only_requires_rmeta: build_runner.only_requires_rmeta(parent, &dep.unit), }) } } @@ -1315,40 +1325,44 @@ impl StaleItem { /// /// Information like file modification time is only calculated for path /// dependencies. -fn calculate(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult> { +fn calculate(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult> { // This function is slammed quite a lot, so the result is memoized. - if let Some(s) = cx.fingerprints.get(unit) { + if let Some(s) = build_runner.fingerprints.get(unit) { return Ok(Arc::clone(s)); } let mut fingerprint = if unit.mode.is_run_custom_build() { - calculate_run_custom_build(cx, unit)? + calculate_run_custom_build(build_runner, unit)? } else if unit.mode.is_doc_test() { panic!("doc tests do not fingerprint"); } else { - calculate_normal(cx, unit)? + calculate_normal(build_runner, unit)? }; // After we built the initial `Fingerprint` be sure to update the // `fs_status` field of it. 
- let target_root = target_root(cx); - let cargo_exe = cx.bcx.config.cargo_exe()?; + let target_root = target_root(build_runner); + let cargo_exe = build_runner.bcx.gctx.cargo_exe()?; fingerprint.check_filesystem( - &mut cx.mtime_cache, + &mut build_runner.mtime_cache, unit.pkg.root(), &target_root, cargo_exe, - cx.bcx.config, + build_runner.bcx.gctx, )?; let fingerprint = Arc::new(fingerprint); - cx.fingerprints + build_runner + .fingerprints .insert(unit.clone(), Arc::clone(&fingerprint)); Ok(fingerprint) } /// Calculate a fingerprint for a "normal" unit, or anything that's not a build /// script. This is an internal helper of [`calculate`], don't call directly. -fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { +fn calculate_normal( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, +) -> CargoResult { let deps = { // Recursively calculate the fingerprint for all of our dependencies. // @@ -1357,22 +1371,22 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult>>()?; deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); deps }; // Afterwards calculate our own fingerprint information. - let target_root = target_root(cx); + let target_root = target_root(build_runner); let local = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { // rustdoc does not have dep-info files. 
- let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { + let fingerprint = pkg_fingerprint(build_runner.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for documenting {}", unit.pkg @@ -1380,14 +1394,14 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult { +fn calculate_run_custom_build( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, +) -> CargoResult { assert!(unit.mode.is_run_custom_build()); // Using the `BuildDeps` information we'll have previously parsed and // inserted into `build_explicit_deps` built an initial snapshot of the @@ -1468,8 +1485,8 @@ fn calculate_run_custom_build(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoRes // the build script this means we'll be watching files and env vars. // Otherwise if we haven't previously executed it we'll just start watching // the whole crate. - let (gen_local, overridden) = build_script_local_fingerprints(cx, unit); - let deps = &cx.build_explicit_deps[unit]; + let (gen_local, overridden) = build_script_local_fingerprints(build_runner, unit); + let deps = &build_runner.build_explicit_deps[unit]; let local = (gen_local)( deps, Some(&|| { @@ -1480,7 +1497,7 @@ By default, if your project contains a build script, cargo scans all files in it to determine whether a rebuild is needed. If you don't expect to access the file, specify `rerun-if-changed` in your build script. 
See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information."; - pkg_fingerprint(cx.bcx, &unit.pkg).map_err(|err| { + pkg_fingerprint(build_runner.bcx, &unit.pkg).map_err(|err| { let mut message = format!("failed to determine package fingerprint for build script for {}", unit.pkg); if err.root_cause().is::() { message = format!("{}\n{}", message, IO_ERR_MESSAGE) @@ -1500,16 +1517,16 @@ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-change // Overridden build scripts don't need to track deps. vec![] } else { - // Create Vec since mutable cx is needed in closure. - let deps = Vec::from(cx.unit_deps(unit)); + // Create Vec since mutable build_runner is needed in closure. + let deps = Vec::from(build_runner.unit_deps(unit)); deps.into_iter() - .map(|dep| DepFingerprint::new(cx, unit, &dep)) + .map(|dep| DepFingerprint::new(build_runner, unit, &dep)) .collect::>>()? }; Ok(Fingerprint { local: Mutex::new(local), - rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), + rustc: util::hash_u64(&build_runner.bcx.rustc().verbose_version), deps, outputs: if overridden { Vec::new() } else { vec![output] }, @@ -1557,7 +1574,7 @@ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-change /// /// [`RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild fn build_script_local_fingerprints( - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> ( Box< @@ -1572,7 +1589,7 @@ fn build_script_local_fingerprints( assert!(unit.mode.is_run_custom_build()); // First up, if this build script is entirely overridden, then we just // return the hash of what we overrode it with. This is the easy case! 
- if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) { + if let Some(fingerprint) = build_script_override_fingerprint(build_runner, unit) { debug!("override local fingerprints deps {}", unit.pkg); return ( Box::new( @@ -1592,7 +1609,7 @@ fn build_script_local_fingerprints( // longstanding bug, in Cargo. Recent refactorings just made it painfully // obvious. let pkg_root = unit.pkg.root().to_path_buf(); - let target_dir = target_root(cx); + let target_dir = target_root(build_runner); let calculate = move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult>| { if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { @@ -1632,13 +1649,13 @@ fn build_script_local_fingerprints( /// Create a [`LocalFingerprint`] for an overridden build script. /// Returns None if it is not overridden. fn build_script_override_fingerprint( - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> Option { // Build script output is only populated at this stage when it is // overridden. - let build_script_outputs = cx.build_script_outputs.lock().unwrap(); - let metadata = cx.get_run_build_script_metadata(unit); + let build_script_outputs = build_runner.build_script_outputs.lock().unwrap(); + let metadata = build_runner.get_run_build_script_metadata(unit); // Returns None if it is not overridden. let output = build_script_outputs.get(metadata)?; let s = format!( @@ -1708,8 +1725,8 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { } /// Prepare for work when a package starts to build -pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let new1 = cx.files().fingerprint_dir(unit); +pub fn prepare_init(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> { + let new1 = build_runner.files().fingerprint_dir(unit); // Doc tests have no output, thus no fingerprint. 
if !new1.exists() && !unit.mode.is_doc_test() { @@ -1721,14 +1738,14 @@ pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { /// Returns the location that the dep-info file will show up at /// for the [`Unit`] specified. -pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf { - cx.files().fingerprint_file_path(unit, "dep-") +pub fn dep_info_loc(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> PathBuf { + build_runner.files().fingerprint_file_path(unit, "dep-") } /// Returns an absolute path that target directory. /// All paths are rewritten to be relative to this. -fn target_root(cx: &Context<'_, '_>) -> PathBuf { - cx.bcx.ws.target_dir().into_path_unlocked() +fn target_root(build_runner: &BuildRunner<'_, '_>) -> PathBuf { + build_runner.bcx.ws.target_dir().into_path_unlocked() } /// Reads the value from the old fingerprint hash file and compare. diff --git a/src/cargo/core/compiler/future_incompat.rs b/src/cargo/core/compiler/future_incompat.rs index 3a50e2fcfa5..7486138b77f 100644 --- a/src/cargo/core/compiler/future_incompat.rs +++ b/src/cargo/core/compiler/future_incompat.rs @@ -169,7 +169,7 @@ impl OnDiskReports { .target_dir() .open_rw_exclusive_create( FUTURE_INCOMPAT_FILE, - ws.config(), + ws.gctx(), "Future incompatibility report", ) .and_then(|file| { @@ -182,7 +182,7 @@ impl OnDiskReports { crate::display_warning_with_error( "failed to write on-disk future incompatible report", &e, - &mut ws.config().shell(), + &mut ws.gctx().shell(), ); } @@ -193,7 +193,7 @@ impl OnDiskReports { pub fn load(ws: &Workspace<'_>) -> CargoResult { let report_file = match ws.target_dir().open_ro_shared( FUTURE_INCOMPAT_FILE, - ws.config(), + ws.gctx(), "Future incompatible report", ) { Ok(r) => r, @@ -299,11 +299,11 @@ fn render_report(per_package_reports: &[FutureIncompatReportPackage]) -> BTreeMa fn get_updates(ws: &Workspace<'_>, package_ids: &BTreeSet) -> Option { // This in general ignores all errors since this is 
opportunistic. let _lock = ws - .config() + .gctx() .acquire_package_cache_lock(CacheLockMode::DownloadExclusive) .ok()?; // Create a set of updated registry sources. - let map = SourceConfigMap::new(ws.config()).ok()?; + let map = SourceConfigMap::new(ws.gctx()).ok()?; let mut package_ids: BTreeSet<_> = package_ids .iter() .filter(|pkg_id| pkg_id.source_id().is_registry()) @@ -373,13 +373,13 @@ pub fn save_and_display_report( bcx: &BuildContext<'_, '_>, per_package_future_incompat_reports: &[FutureIncompatReportPackage], ) { - let should_display_message = match bcx.config.future_incompat_config() { + let should_display_message = match bcx.gctx.future_incompat_config() { Ok(config) => config.should_display_message(), Err(e) => { crate::display_warning_with_error( "failed to read future-incompat config from disk", &e, - &mut bcx.config.shell(), + &mut bcx.gctx.shell(), ); true } @@ -390,7 +390,7 @@ pub fn save_and_display_report( // `should_display_message` from the config file if bcx.build_config.future_incompat_report { drop( - bcx.config + bcx.gctx .shell() .note("0 dependencies had future-incompatible warnings"), ); @@ -418,7 +418,7 @@ pub fn save_and_display_report( let package_vers: Vec<_> = package_ids.iter().map(|pid| pid.to_string()).collect(); if should_display_message || bcx.build_config.future_incompat_report { - drop(bcx.config.shell().warn(&format!( + drop(bcx.gctx.shell().warn(&format!( "the following packages contain code that will be rejected by a future \ version of Rust: {}", package_vers.join(", ") @@ -488,14 +488,14 @@ https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html#the-patch ); if bcx.build_config.future_incompat_report { - drop(bcx.config.shell().note(&suggestion_message)); - drop(bcx.config.shell().note(&format!( + drop(bcx.gctx.shell().note(&suggestion_message)); + drop(bcx.gctx.shell().note(&format!( "this report can be shown with `cargo report \ future-incompatibilities --id {}`", saved_report_id ))); } else if 
should_display_message { - drop(bcx.config.shell().note(&format!( + drop(bcx.gctx.shell().note(&format!( "to see what the problems were, use the option \ `--future-incompat-report`, or run `cargo report \ future-incompatibilities --id {}`", diff --git a/src/cargo/core/compiler/job_queue/job_state.rs b/src/cargo/core/compiler/job_queue/job_state.rs index a513d3b8986..fe3a79adb98 100644 --- a/src/cargo/core/compiler/job_queue/job_state.rs +++ b/src/cargo/core/compiler/job_queue/job_state.rs @@ -4,7 +4,7 @@ use std::{cell::Cell, marker, sync::Arc}; use cargo_util::ProcessBuilder; -use crate::core::compiler::context::OutputFile; +use crate::core::compiler::build_runner::OutputFile; use crate::core::compiler::future_incompat::FutureBreakageItem; use crate::util::Queue; use crate::CargoResult; @@ -16,7 +16,7 @@ use super::{Artifact, DiagDedupe, Job, JobId, Message}; /// /// The job may execute on either a dedicated thread or the main thread. If the job executes on the /// main thread, the `output` field must be set to prevent a deadlock. -pub struct JobState<'a, 'cfg> { +pub struct JobState<'a, 'gctx> { /// Channel back to the main thread to coordinate messages and such. /// /// When the `output` field is `Some`, care must be taken to avoid calling `push_bounded` on @@ -33,7 +33,7 @@ pub struct JobState<'a, 'cfg> { /// interleaved. In the future, it may be wrapped in a `Mutex` instead. In this case /// interleaving is still prevented as the lock would be held for the whole printing of an /// output message. - output: Option<&'a DiagDedupe<'cfg>>, + output: Option<&'a DiagDedupe<'gctx>>, /// The job id that this state is associated with, used when sending /// messages back to the main thread. 
@@ -49,11 +49,11 @@ pub struct JobState<'a, 'cfg> { _marker: marker::PhantomData<&'a ()>, } -impl<'a, 'cfg> JobState<'a, 'cfg> { +impl<'a, 'gctx> JobState<'a, 'gctx> { pub(super) fn new( id: JobId, messages: Arc>, - output: Option<&'a DiagDedupe<'cfg>>, + output: Option<&'a DiagDedupe<'gctx>>, rmeta_required: bool, ) -> Self { Self { @@ -81,7 +81,7 @@ impl<'a, 'cfg> JobState<'a, 'cfg> { pub fn stdout(&self, stdout: String) -> CargoResult<()> { if let Some(dedupe) = self.output { - writeln!(dedupe.config.shell().out(), "{}", stdout)?; + writeln!(dedupe.gctx.shell().out(), "{}", stdout)?; } else { self.messages.push_bounded(Message::Stdout(stdout)); } @@ -90,7 +90,7 @@ impl<'a, 'cfg> JobState<'a, 'cfg> { pub fn stderr(&self, stderr: String) -> CargoResult<()> { if let Some(dedupe) = self.output { - let mut shell = dedupe.config.shell(); + let mut shell = dedupe.gctx.shell(); shell.print_ansi_stderr(stderr.as_bytes())?; shell.err().write_all(b"\n")?; } else { diff --git a/src/cargo/core/compiler/job_queue/mod.rs b/src/cargo/core/compiler/job_queue/mod.rs index 50bcddf7e92..dc34c92a7ae 100644 --- a/src/cargo/core/compiler/job_queue/mod.rs +++ b/src/cargo/core/compiler/job_queue/mod.rs @@ -131,9 +131,9 @@ use tracing::{debug, trace}; pub use self::job::Freshness::{self, Dirty, Fresh}; pub use self::job::{Job, Work}; pub use self::job_state::JobState; -use super::context::OutputFile; +use super::build_runner::OutputFile; use super::timings::Timings; -use super::{BuildContext, BuildPlan, CompileMode, Context, Unit}; +use super::{BuildContext, BuildPlan, BuildRunner, CompileMode, Unit}; use crate::core::compiler::descriptive_pkg_name; use crate::core::compiler::future_incompat::{ self, FutureBreakageItem, FutureIncompatReportPackage, @@ -145,16 +145,16 @@ use crate::util::errors::AlreadyPrintedError; use crate::util::machine_message::{self, Message as _}; use crate::util::CargoResult; use crate::util::{self, internal, profile}; -use crate::util::{Config, DependencyQueue, 
Progress, ProgressStyle, Queue}; +use crate::util::{DependencyQueue, GlobalContext, Progress, ProgressStyle, Queue}; /// This structure is backed by the `DependencyQueue` type and manages the /// queueing of compilation steps for each package. Packages enqueue units of /// work and then later on the entire graph is converted to DrainState and /// executed. -pub struct JobQueue<'cfg> { +pub struct JobQueue<'gctx> { queue: DependencyQueue, counts: HashMap, - timings: Timings<'cfg>, + timings: Timings<'gctx>, } /// This structure is backed by the `DependencyQueue` type and manages the @@ -163,14 +163,14 @@ pub struct JobQueue<'cfg> { /// /// It is created from JobQueue when we have fully assembled the crate graph /// (i.e., all package dependencies are known). -struct DrainState<'cfg> { +struct DrainState<'gctx> { // This is the length of the DependencyQueue when starting out total_units: usize, queue: DependencyQueue, messages: Arc>, /// Diagnostic deduplication support. - diag_dedupe: DiagDedupe<'cfg>, + diag_dedupe: DiagDedupe<'gctx>, /// Count of warnings, used to print a summary after the job succeeds warning_count: HashMap, active: HashMap, @@ -178,9 +178,9 @@ struct DrainState<'cfg> { documented: HashSet, scraped: HashSet, counts: HashMap, - progress: Progress<'cfg>, + progress: Progress<'gctx>, next_id: u32, - timings: Timings<'cfg>, + timings: Timings<'gctx>, /// Tokens that are currently owned by this Cargo, and may be "associated" /// with a rustc process. They may also be unused, though if so will be @@ -195,7 +195,7 @@ struct DrainState<'cfg> { /// retrieved from the `queue`. We eagerly pull jobs off the main queue to /// allow us to request jobserver tokens pretty early. pending_queue: Vec<(Unit, Job, usize)>, - print: DiagnosticPrinter<'cfg>, + print: DiagnosticPrinter<'gctx>, /// How many jobs we've finished finished: usize, @@ -291,16 +291,16 @@ impl std::fmt::Display for JobId { } /// Handler for deduplicating diagnostics. 
-struct DiagDedupe<'cfg> { +struct DiagDedupe<'gctx> { seen: RefCell>, - config: &'cfg Config, + gctx: &'gctx GlobalContext, } -impl<'cfg> DiagDedupe<'cfg> { - fn new(config: &'cfg Config) -> Self { +impl<'gctx> DiagDedupe<'gctx> { + fn new(gctx: &'gctx GlobalContext) -> Self { DiagDedupe { seen: RefCell::new(HashSet::new()), - config, + gctx, } } @@ -313,7 +313,7 @@ impl<'cfg> DiagDedupe<'cfg> { if !self.seen.borrow_mut().insert(h) { return Ok(false); } - let mut shell = self.config.shell(); + let mut shell = self.gctx.shell(); shell.print_ansi_stderr(diag.as_bytes())?; shell.err().write_all(b"\n")?; Ok(true) @@ -374,8 +374,8 @@ enum Message { FutureIncompatReport(JobId, Vec), } -impl<'cfg> JobQueue<'cfg> { - pub fn new(bcx: &BuildContext<'_, 'cfg>) -> JobQueue<'cfg> { +impl<'gctx> JobQueue<'gctx> { + pub fn new(bcx: &BuildContext<'_, 'gctx>) -> JobQueue<'gctx> { JobQueue { queue: DependencyQueue::new(), counts: HashMap::new(), @@ -383,8 +383,13 @@ impl<'cfg> JobQueue<'cfg> { } } - pub fn enqueue(&mut self, cx: &Context<'_, 'cfg>, unit: &Unit, job: Job) -> CargoResult<()> { - let dependencies = cx.unit_deps(unit); + pub fn enqueue( + &mut self, + build_runner: &BuildRunner<'_, 'gctx>, + unit: &Unit, + job: Job, + ) -> CargoResult<()> { + let dependencies = build_runner.unit_deps(unit); let mut queue_deps = dependencies .iter() .filter(|dep| { @@ -398,9 +403,9 @@ impl<'cfg> JobQueue<'cfg> { .map(|dep| { // Handle the case here where our `unit -> dep` dependency may // only require the metadata, not the full compilation to - // finish. Use the tables in `cx` to figure out what kind - // of artifact is associated with this dependency. - let artifact = if cx.only_requires_rmeta(unit, &dep.unit) { + // finish. Use the tables in `build_runner` to figure out what + // kind of artifact is associated with this dependency. 
+ let artifact = if build_runner.only_requires_rmeta(unit, &dep.unit) { Artifact::Metadata } else { Artifact::All @@ -432,17 +437,17 @@ impl<'cfg> JobQueue<'cfg> { // transitively contains the `Metadata` edge. if unit.requires_upstream_objects() { for dep in dependencies { - depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit.clone()); + depend_on_deps_of_deps(build_runner, &mut queue_deps, dep.unit.clone()); } fn depend_on_deps_of_deps( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, deps: &mut HashMap, unit: Unit, ) { - for dep in cx.unit_deps(&unit) { + for dep in build_runner.unit_deps(&unit) { if deps.insert(dep.unit.clone(), Artifact::All).is_none() { - depend_on_deps_of_deps(cx, deps, dep.unit.clone()); + depend_on_deps_of_deps(build_runner, deps, dep.unit.clone()); } } } @@ -462,11 +467,16 @@ impl<'cfg> JobQueue<'cfg> { /// This function will spawn off `config.jobs()` workers to build all of the /// necessary dependencies, in order. Freshness is propagated as far as /// possible along each dependency chain. - pub fn execute(mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan) -> CargoResult<()> { + pub fn execute( + mut self, + build_runner: &mut BuildRunner<'_, '_>, + plan: &mut BuildPlan, + ) -> CargoResult<()> { let _p = profile::start("executing the job graph"); self.queue.queue_finished(); - let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config); + let progress = + Progress::with_style("Building", ProgressStyle::Ratio, build_runner.bcx.gctx); let state = DrainState { total_units: self.queue.len(), queue: self.queue, @@ -475,7 +485,7 @@ impl<'cfg> JobQueue<'cfg> { // typical messages. If you change this, please update the test // caching_large_output, too. 
messages: Arc::new(Queue::new(100)), - diag_dedupe: DiagDedupe::new(cx.bcx.config), + diag_dedupe: DiagDedupe::new(build_runner.bcx.gctx), warning_count: HashMap::new(), active: HashMap::new(), compiled: HashSet::new(), @@ -487,14 +497,17 @@ impl<'cfg> JobQueue<'cfg> { timings: self.timings, tokens: Vec::new(), pending_queue: Vec::new(), - print: DiagnosticPrinter::new(cx.bcx.config, &cx.bcx.rustc().workspace_wrapper), + print: DiagnosticPrinter::new( + build_runner.bcx.gctx, + &build_runner.bcx.rustc().workspace_wrapper, + ), finished: 0, per_package_future_incompat_reports: Vec::new(), }; // Create a helper thread for acquiring jobserver tokens let messages = state.messages.clone(); - let helper = cx + let helper = build_runner .jobserver .clone() .into_helper_thread(move |token| { @@ -508,7 +521,7 @@ impl<'cfg> JobQueue<'cfg> { // It is important that this uses `push` instead of `push_bounded` for // now. If someone wants to fix this to be bounded, the `drop` // implementation needs to be changed to avoid possible deadlocks. 
- let _diagnostic_server = cx + let _diagnostic_server = build_runner .bcx .build_config .rustfix_diagnostic_server @@ -516,19 +529,19 @@ impl<'cfg> JobQueue<'cfg> { .take() .map(move |srv| srv.start(move |msg| messages.push(Message::FixDiagnostic(msg)))); - thread::scope( - move |scope| match state.drain_the_queue(cx, plan, scope, &helper) { + thread::scope(move |scope| { + match state.drain_the_queue(build_runner, plan, scope, &helper) { Some(err) => Err(err), None => Ok(()), - }, - ) + } + }) } } -impl<'cfg> DrainState<'cfg> { +impl<'gctx> DrainState<'gctx> { fn spawn_work_if_possible<'s>( &mut self, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, jobserver_helper: &HelperThread, scope: &'s Scope<'s, '_>, ) -> CargoResult<()> { @@ -560,14 +573,19 @@ impl<'cfg> DrainState<'cfg> { while self.has_extra_tokens() && !self.pending_queue.is_empty() { let (unit, job, _) = self.pending_queue.pop().unwrap(); *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; - if !cx.bcx.build_config.build_plan { + if !build_runner.bcx.build_config.build_plan { // Print out some nice progress information. // NOTE: An error here will drop the job without starting it. // That should be OK, since we want to exit as soon as // possible during an error. 
- self.note_working_on(cx.bcx.config, cx.bcx.ws.root(), &unit, job.freshness())?; + self.note_working_on( + build_runner.bcx.gctx, + build_runner.bcx.ws.root(), + &unit, + job.freshness(), + )?; } - self.run(&unit, job, cx, scope); + self.run(&unit, job, build_runner, scope); } Ok(()) @@ -579,14 +597,15 @@ impl<'cfg> DrainState<'cfg> { fn handle_event( &mut self, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, plan: &mut BuildPlan, event: Message, ) -> Result<(), ErrorToHandle> { match event { Message::Run(id, cmd) => { - cx.bcx - .config + build_runner + .bcx + .gctx .shell() .verbose(|c| c.status("Running", &cmd))?; self.timings.unit_start(id, self.active[&id].clone()); @@ -595,10 +614,10 @@ impl<'cfg> DrainState<'cfg> { plan.update(&module_name, &cmd, &filenames)?; } Message::Stdout(out) => { - writeln!(cx.bcx.config.shell().out(), "{}", out)?; + writeln!(build_runner.bcx.gctx.shell().out(), "{}", out)?; } Message::Stderr(err) => { - let mut shell = cx.bcx.config.shell(); + let mut shell = build_runner.bcx.gctx.shell(); shell.print_ansi_stderr(err.as_bytes())?; shell.err().write_all(b"\n")?; } @@ -619,7 +638,7 @@ impl<'cfg> DrainState<'cfg> { } } Message::Warning { id, warning } => { - cx.bcx.config.shell().warn(warning)?; + build_runner.bcx.gctx.shell().warn(warning)?; self.bump_warning_count(id, true, false); } Message::WarningCount { @@ -640,9 +659,9 @@ impl<'cfg> DrainState<'cfg> { trace!("end: {:?}", id); self.finished += 1; self.report_warning_count( - cx.bcx.config, + build_runner.bcx.gctx, id, - &cx.bcx.rustc().workspace_wrapper, + &build_runner.bcx.rustc().workspace_wrapper, ); self.active.remove(&id).unwrap() } @@ -655,18 +674,19 @@ impl<'cfg> DrainState<'cfg> { }; debug!("end ({:?}): {:?}", unit, result); match result { - Ok(()) => self.finish(id, &unit, artifact, cx)?, - Err(_) if cx.bcx.unit_can_fail_for_docscraping(&unit) => { - cx.failed_scrape_units + Ok(()) => self.finish(id, &unit, artifact, build_runner)?, + Err(_) if 
build_runner.bcx.unit_can_fail_for_docscraping(&unit) => { + build_runner + .failed_scrape_units .lock() .unwrap() - .insert(cx.files().metadata(&unit)); + .insert(build_runner.files().metadata(&unit)); self.queue.finish(&unit, &artifact); } Err(error) => { let msg = "The following warnings were emitted during compilation:"; - self.emit_warnings(Some(msg), &unit, cx)?; - self.back_compat_notice(cx, &unit)?; + self.emit_warnings(Some(msg), &unit, build_runner)?; + self.back_compat_notice(build_runner, &unit)?; return Err(ErrorToHandle { error, print_always: true, @@ -720,7 +740,7 @@ impl<'cfg> DrainState<'cfg> { /// because it is important for the loop to carefully handle errors. fn drain_the_queue<'s>( mut self, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, plan: &mut BuildPlan, scope: &'s Scope<'s, '_>, jobserver_helper: &HelperThread, @@ -743,9 +763,9 @@ impl<'cfg> DrainState<'cfg> { // must be handled in such a way that the loop is still allowed to // drain event messages. loop { - if errors.count == 0 || cx.bcx.build_config.keep_going { - if let Err(e) = self.spawn_work_if_possible(cx, jobserver_helper, scope) { - self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e); + if errors.count == 0 || build_runner.bcx.build_config.keep_going { + if let Err(e) = self.spawn_work_if_possible(build_runner, jobserver_helper, scope) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, e); } } @@ -761,14 +781,14 @@ impl<'cfg> DrainState<'cfg> { // don't actually use, and if this happens just relinquish it back // to the jobserver itself. 
for event in self.wait_for_events() { - if let Err(event_err) = self.handle_event(cx, plan, event) { - self.handle_error(&mut cx.bcx.config.shell(), &mut errors, event_err); + if let Err(event_err) = self.handle_event(build_runner, plan, event) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, event_err); } } } self.progress.clear(); - let profile_name = cx.bcx.build_config.requested_profile; + let profile_name = build_runner.bcx.build_config.requested_profile; // NOTE: this may be a bit inaccurate, since this may not display the // profile for what was actually built. Profile overrides can change // these settings, and in some cases different targets are built with @@ -776,7 +796,7 @@ impl<'cfg> DrainState<'cfg> { // list of Units built, and maybe display a list of the different // profiles used. However, to keep it simple and compatible with old // behavior, we just display what the base profile is. - let profile = cx.bcx.profiles.base_profile(); + let profile = build_runner.bcx.profiles.base_profile(); let mut opt_type = String::from(if profile.opt_level.as_str() == "0" { "unoptimized" } else { @@ -786,12 +806,12 @@ impl<'cfg> DrainState<'cfg> { opt_type += " + debuginfo"; } - let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed()); - if let Err(e) = self.timings.finished(cx, &errors.to_error()) { - self.handle_error(&mut cx.bcx.config.shell(), &mut errors, e); + let time_elapsed = util::elapsed(build_runner.bcx.gctx.creation_time().elapsed()); + if let Err(e) = self.timings.finished(build_runner, &errors.to_error()) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, e); } - if cx.bcx.build_config.emit_json() { - let mut shell = cx.bcx.config.shell(); + if build_runner.bcx.build_config.emit_json() { + let mut shell = build_runner.bcx.gctx.shell(); let msg = machine_message::BuildFinished { success: errors.count == 0, } @@ -806,7 +826,7 @@ impl<'cfg> DrainState<'cfg> { // `display_error` inside 
`handle_error`. Some(anyhow::Error::new(AlreadyPrintedError::new(error))) } else if self.queue.is_empty() && self.pending_queue.is_empty() { - let profile_link = cx.bcx.config.shell().err_hyperlink( + let profile_link = build_runner.bcx.gctx.shell().err_hyperlink( "https://doc.rust-lang.org/cargo/reference/profiles.html#default-profiles", ); let message = format!( @@ -814,11 +834,11 @@ impl<'cfg> DrainState<'cfg> { profile_link.open(), profile_link.close() ); - if !cx.bcx.build_config.build_plan { + if !build_runner.bcx.build_config.build_plan { // It doesn't really matter if this fails. - let _ = cx.bcx.config.shell().status("Finished", message); + let _ = build_runner.bcx.gctx.shell().status("Finished", message); future_incompat::save_and_display_report( - cx.bcx, + build_runner.bcx, &self.per_package_future_incompat_reports, ); } @@ -907,7 +927,13 @@ impl<'cfg> DrainState<'cfg> { /// /// Fresh jobs block until finished (which should be very fast!), Dirty /// jobs will spawn a thread in the background and return immediately. 
- fn run<'s>(&mut self, unit: &Unit, job: Job, cx: &Context<'_, '_>, scope: &'s Scope<'s, '_>) { + fn run<'s>( + &mut self, + unit: &Unit, + job: Job, + build_runner: &BuildRunner<'_, '_>, + scope: &'s Scope<'s, '_>, + ) { let id = JobId(self.next_id); self.next_id = self.next_id.checked_add(1).unwrap(); @@ -917,7 +943,7 @@ impl<'cfg> DrainState<'cfg> { let messages = self.messages.clone(); let is_fresh = job.freshness().is_fresh(); - let rmeta_required = cx.rmeta_required(unit); + let rmeta_required = build_runner.rmeta_required(unit); let doit = move |diag_dedupe| { let state = JobState::new(id, messages, diag_dedupe, rmeta_required); @@ -942,29 +968,29 @@ impl<'cfg> DrainState<'cfg> { &mut self, msg: Option<&str>, unit: &Unit, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, ) -> CargoResult<()> { - let outputs = cx.build_script_outputs.lock().unwrap(); - let Some(metadata) = cx.find_build_script_metadata(unit) else { + let outputs = build_runner.build_script_outputs.lock().unwrap(); + let Some(metadata) = build_runner.find_build_script_metadata(unit) else { return Ok(()); }; - let bcx = &mut cx.bcx; + let bcx = &mut build_runner.bcx; if let Some(output) = outputs.get(metadata) { if !output.warnings.is_empty() { if let Some(msg) = msg { - writeln!(bcx.config.shell().err(), "{}\n", msg)?; + writeln!(bcx.gctx.shell().err(), "{}\n", msg)?; } for warning in output.warnings.iter() { let warning_with_package = format!("{}@{}: {}", unit.pkg.name(), unit.pkg.version(), warning); - bcx.config.shell().warn(warning_with_package)?; + bcx.gctx.shell().warn(warning_with_package)?; } if msg.is_some() { // Output an empty line. - writeln!(bcx.config.shell().err())?; + writeln!(bcx.gctx.shell().err())?; } } } @@ -995,7 +1021,7 @@ impl<'cfg> DrainState<'cfg> { /// Displays a final report of the warnings emitted by a particular job. 
fn report_warning_count( &mut self, - config: &Config, + gctx: &GlobalContext, id: JobId, rustc_workspace_wrapper: &Option, ) { @@ -1064,7 +1090,7 @@ impl<'cfg> DrainState<'cfg> { } // Errors are ignored here because it is tricky to handle them // correctly, and they aren't important. - let _ = config.shell().warn(message); + let _ = gctx.shell().warn(message); } fn finish( @@ -1072,10 +1098,10 @@ impl<'cfg> DrainState<'cfg> { id: JobId, unit: &Unit, artifact: Artifact, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, ) -> CargoResult<()> { - if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.config) { - self.emit_warnings(None, unit, cx)?; + if unit.mode.is_run_custom_build() && unit.show_warnings(build_runner.bcx.gctx) { + self.emit_warnings(None, unit, build_runner)?; } let unlocked = self.queue.finish(unit, &artifact); match artifact { @@ -1096,7 +1122,7 @@ impl<'cfg> DrainState<'cfg> { // out any more information for a package after we've printed it once. fn note_working_on( &mut self, - config: &Config, + gctx: &GlobalContext, ws_root: &Path, unit: &Unit, fresh: &Freshness, @@ -1115,25 +1141,24 @@ impl<'cfg> DrainState<'cfg> { // being a compiled package. Dirty(dirty_reason) => { if !dirty_reason.is_fresh_build() { - config - .shell() + gctx.shell() .verbose(|shell| dirty_reason.present_to(shell, unit, ws_root))?; } if unit.mode.is_doc() { self.documented.insert(unit.pkg.package_id()); - config.shell().status("Documenting", &unit.pkg)?; + gctx.shell().status("Documenting", &unit.pkg)?; } else if unit.mode.is_doc_test() { // Skip doc test. 
} else if unit.mode.is_doc_scrape() { self.scraped.insert(unit.pkg.package_id()); - config.shell().status("Scraping", &unit.pkg)?; + gctx.shell().status("Scraping", &unit.pkg)?; } else { self.compiled.insert(unit.pkg.package_id()); if unit.mode.is_check() { - config.shell().status("Checking", &unit.pkg)?; + gctx.shell().status("Checking", &unit.pkg)?; } else { - config.shell().status("Compiling", &unit.pkg)?; + gctx.shell().status("Compiling", &unit.pkg)?; } } } @@ -1143,23 +1168,27 @@ impl<'cfg> DrainState<'cfg> { && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id())) { self.compiled.insert(unit.pkg.package_id()); - config.shell().verbose(|c| c.status("Fresh", &unit.pkg))?; + gctx.shell().verbose(|c| c.status("Fresh", &unit.pkg))?; } } } Ok(()) } - fn back_compat_notice(&self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { + fn back_compat_notice( + &self, + build_runner: &BuildRunner<'_, '_>, + unit: &Unit, + ) -> CargoResult<()> { if unit.pkg.name() != "diesel" || unit.pkg.version() >= &Version::new(1, 4, 8) - || cx.bcx.ws.resolve_behavior() == ResolveBehavior::V1 + || build_runner.bcx.ws.resolve_behavior() == ResolveBehavior::V1 || !unit.pkg.package_id().source_id().is_registry() || !unit.features.is_empty() { return Ok(()); } - if !cx + if !build_runner .bcx .unit_graph .keys() @@ -1167,7 +1196,7 @@ impl<'cfg> DrainState<'cfg> { { return Ok(()); } - cx.bcx.config.shell().note( + build_runner.bcx.gctx.shell().note( "\ This error may be due to an interaction between diesel and Cargo's new feature resolver. Try updating to diesel 1.4.8 to fix this error. 
diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index 57b65907ca2..30062164d3c 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -166,7 +166,7 @@ impl Layout { // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. - let lock = dest.open_rw_exclusive_create(".cargo-lock", ws.config(), "build directory")?; + let lock = dest.open_rw_exclusive_create(".cargo-lock", ws.gctx(), "build directory")?; let root = root.into_path_unlocked(); let dest = dest.into_path_unlocked(); let deps = dest.join("deps"); diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 19a77173e92..8aed78f315a 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -13,7 +13,7 @@ //! //! * [`BuildContext`] is a static context containing all information you need //! before a build gets started. -//! * [`Context`] is the center of the world, coordinating a running build and +//! * [`BuildRunner`] is the center of the world, coordinating a running build and //! collecting information from it. //! * [`custom_build`] is the home of build script executions and output parsing. //! 
* [`fingerprint`] not only defines but also executes a set of rules to @@ -35,9 +35,9 @@ pub mod artifact; mod build_config; pub(crate) mod build_context; mod build_plan; +pub(crate) mod build_runner; mod compilation; mod compile_kind; -pub(crate) mod context; mod crate_type; mod custom_build; pub(crate) mod fingerprint; @@ -72,9 +72,9 @@ pub use self::build_context::{ BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, }; use self::build_plan::BuildPlan; +pub use self::build_runner::{BuildRunner, Metadata}; pub use self::compilation::{Compilation, Doctest, UnitOutput}; pub use self::compile_kind::{CompileKind, CompileTarget}; -pub use self::context::{Context, Metadata}; pub use self::crate_type::CrateType; pub use self::custom_build::LinkArgTarget; pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; @@ -108,7 +108,7 @@ pub trait Executor: Send + Sync + 'static { /// Called after a rustc process invocation is prepared up-front for a given /// unit of work (may still be modified for runtime-known dependencies, when /// the work is actually executed). - fn init(&self, _cx: &Context<'_, '_>, _unit: &Unit) {} + fn init(&self, _build_runner: &BuildRunner<'_, '_>, _unit: &Unit) {} /// In case of an `Err`, Cargo will not continue with the build process for /// this package. @@ -158,42 +158,45 @@ impl Executor for DefaultExecutor { /// Note that **no actual work is executed as part of this**, that's all done /// next as part of [`JobQueue::execute`] function which will run everything /// in order with proper parallelism. 
-fn compile<'cfg>( - cx: &mut Context<'_, 'cfg>, - jobs: &mut JobQueue<'cfg>, +fn compile<'gctx>( + build_runner: &mut BuildRunner<'_, 'gctx>, + jobs: &mut JobQueue<'gctx>, plan: &mut BuildPlan, unit: &Unit, exec: &Arc, force_rebuild: bool, ) -> CargoResult<()> { - let bcx = cx.bcx; + let bcx = build_runner.bcx; let build_plan = bcx.build_config.build_plan; - if !cx.compiled.insert(unit.clone()) { + if !build_runner.compiled.insert(unit.clone()) { return Ok(()); } // Build up the work to be done to compile this unit, enqueuing it once // we've got everything constructed. let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); - fingerprint::prepare_init(cx, unit)?; + fingerprint::prepare_init(build_runner, unit)?; let job = if unit.mode.is_run_custom_build() { - custom_build::prepare(cx, unit)? + custom_build::prepare(build_runner, unit)? } else if unit.mode.is_doc_test() { // We run these targets later, so this is just a no-op for now. Job::new_fresh() } else if build_plan { - Job::new_dirty(rustc(cx, unit, &exec.clone())?, DirtyReason::FreshBuild) + Job::new_dirty( + rustc(build_runner, unit, &exec.clone())?, + DirtyReason::FreshBuild, + ) } else { let force = exec.force_rebuild(unit) || force_rebuild; - let mut job = fingerprint::prepare_target(cx, unit, force)?; + let mut job = fingerprint::prepare_target(build_runner, unit, force)?; job.before(if job.freshness().is_dirty() { let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - rustdoc(cx, unit)? + rustdoc(build_runner, unit)? } else { - rustc(cx, unit, exec)? + rustc(build_runner, unit, exec)? }; - work.then(link_targets(cx, unit, false)?) + work.then(link_targets(build_runner, unit, false)?) 
} else { // We always replay the output cache, // since it might contain future-incompat-report messages @@ -201,26 +204,26 @@ fn compile<'cfg>( unit.pkg.package_id(), PathBuf::from(unit.pkg.manifest_path()), &unit.target, - cx.files().message_cache_path(unit), - cx.bcx.build_config.message_format, - unit.show_warnings(bcx.config), + build_runner.files().message_cache_path(unit), + build_runner.bcx.build_config.message_format, + unit.show_warnings(bcx.gctx), ); // Need to link targets on both the dirty and fresh. - work.then(link_targets(cx, unit, true)?) + work.then(link_targets(build_runner, unit, true)?) }); job }; - jobs.enqueue(cx, unit, job)?; + jobs.enqueue(build_runner, unit, job)?; drop(p); // Be sure to compile all dependencies of this target as well. - let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. + let deps = Vec::from(build_runner.unit_deps(unit)); // Create vec due to mutable borrow. for dep in deps { - compile(cx, jobs, plan, &dep.unit, exec, false)?; + compile(build_runner, jobs, plan, &dep.unit, exec, false)?; } if build_plan { - plan.add(cx, unit)?; + plan.add(build_runner, unit)?; } Ok(()) @@ -229,13 +232,13 @@ fn compile<'cfg>( /// Generates the warning message used when fallible doc-scrape units fail, /// either for rustdoc or rustc. fn make_failed_scrape_diagnostic( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, top_line: impl Display, ) -> String { let manifest_path = unit.pkg.manifest_path(); let relative_manifest_path = manifest_path - .strip_prefix(cx.bcx.ws.root()) + .strip_prefix(build_runner.bcx.ws.root()) .unwrap_or(&manifest_path); format!( @@ -248,65 +251,72 @@ fn make_failed_scrape_diagnostic( } /// Creates a unit of work invoking `rustc` for building the `unit`. 
-fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> CargoResult { - let mut rustc = prepare_rustc(cx, unit)?; - let build_plan = cx.bcx.build_config.build_plan; +fn rustc( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, + exec: &Arc, +) -> CargoResult { + let mut rustc = prepare_rustc(build_runner, unit)?; + let build_plan = build_runner.bcx.build_config.build_plan; let name = unit.pkg.name(); let buildkey = unit.buildkey(); - let outputs = cx.outputs(unit)?; - let root = cx.files().out_dir(unit); + let outputs = build_runner.outputs(unit)?; + let root = build_runner.files().out_dir(unit); // Prepare the native lib state (extra `-L` and `-l` flags). - let build_script_outputs = Arc::clone(&cx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let current_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); - let build_scripts = cx.build_scripts.get(unit).cloned(); + let build_scripts = build_runner.build_scripts.get(unit).cloned(); // If we are a binary and the package also contains a library, then we // don't pass the `-l` flags. 
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); - let dep_info_name = if cx.files().use_extra_filename(unit) { + let dep_info_name = if build_runner.files().use_extra_filename(unit) { format!( "{}-{}.d", unit.target.crate_name(), - cx.files().metadata(unit) + build_runner.files().metadata(unit) ) } else { format!("{}.d", unit.target.crate_name()) }; let rustc_dep_info_loc = root.join(dep_info_name); - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); + let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit); - let mut output_options = OutputOptions::new(cx, unit); + let mut output_options = OutputOptions::new(build_runner, unit); let package_id = unit.pkg.package_id(); let target = Target::clone(&unit.target); let mode = unit.mode; - exec.init(cx, unit); + exec.init(build_runner, unit); let exec = exec.clone(); - let root_output = cx.files().host_dest().to_path_buf(); - let target_dir = cx.bcx.ws.target_dir().into_path_unlocked(); + let root_output = build_runner.files().host_dest().to_path_buf(); + let target_dir = build_runner.bcx.ws.target_dir().into_path_unlocked(); let pkg_root = unit.pkg.root().to_path_buf(); let cwd = rustc .get_cwd() - .unwrap_or_else(|| cx.bcx.config.cwd()) + .unwrap_or_else(|| build_runner.bcx.gctx.cwd()) .to_path_buf(); - let fingerprint_dir = cx.files().fingerprint_dir(unit); - let script_metadata = cx.find_build_script_metadata(unit); + let fingerprint_dir = build_runner.files().fingerprint_dir(unit); + let script_metadata = build_runner.find_build_script_metadata(unit); let is_local = unit.is_local(); let artifact = unit.artifact; - let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit) - && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose); + let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit) + && !matches!( + build_runner.bcx.gctx.shell().verbosity(), + Verbosity::Verbose + ); let 
failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| { // If this unit is needed for doc-scraping, then we generate a diagnostic that // describes the set of reverse-dependencies that cause the unit to be needed. let target_desc = unit.target.description_named(); - let mut for_scrape_units = cx + let mut for_scrape_units = build_runner .bcx .scrape_units_have_dep_on(unit) .into_iter() @@ -314,7 +324,7 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car .collect::>(); for_scrape_units.sort(); let for_scrape_units = for_scrape_units.join(", "); - make_failed_scrape_diagnostic(cx, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}")) + make_failed_scrape_diagnostic(build_runner, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}")) }); if hide_diagnostics_for_scrape_unit { output_options.show_diagnostics = false; @@ -512,21 +522,28 @@ fn verbose_if_simple_exit_code(err: Error) -> Error { /// Link the compiled target (often of form `foo-{metadata_hash}`) to the /// final target. This must happen during both "Fresh" and "Compile". 
-fn link_targets(cx: &mut Context<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult { - let bcx = cx.bcx; - let outputs = cx.outputs(unit)?; - let export_dir = cx.files().export_dir(); +fn link_targets( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, + fresh: bool, +) -> CargoResult { + let bcx = build_runner.bcx; + let outputs = build_runner.outputs(unit)?; + let export_dir = build_runner.files().export_dir(); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let profile = unit.profile.clone(); let unit_mode = unit.mode; let features = unit.features.iter().map(|s| s.to_string()).collect(); let json_messages = bcx.build_config.emit_json(); - let executable = cx.get_executable(unit)?; + let executable = build_runner.get_executable(unit)?; let mut target = Target::clone(&unit.target); if let TargetSourcePath::Metabuild = target.src_path() { // Give it something to serialize. - let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); + let path = unit + .pkg + .manifest() + .metabuild_path(build_runner.bcx.ws.target_dir()); target.set_src_path(TargetSourcePath::Path(path)); } @@ -655,20 +672,20 @@ where /// This builds a static view of the invocation. Flags depending on the /// completion of other units will be added later in runtime, such as flags /// from build scripts. 
-fn prepare_rustc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult { - let is_primary = cx.is_primary_package(unit); - let is_workspace = cx.bcx.ws.is_member(&unit.pkg); +fn prepare_rustc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let is_primary = build_runner.is_primary_package(unit); + let is_workspace = build_runner.bcx.ws.is_member(&unit.pkg); - let mut base = cx + let mut base = build_runner .compilation .rustc_process(unit, is_primary, is_workspace)?; - build_base_args(cx, &mut base, unit)?; + build_base_args(build_runner, &mut base, unit)?; - base.inherit_jobserver(&cx.jobserver); - build_deps_args(&mut base, cx, unit)?; - add_cap_lints(cx.bcx, unit, &mut base); - base.args(cx.bcx.rustflags_args(unit)); - if cx.bcx.config.cli_unstable().binary_dep_depinfo { + base.inherit_jobserver(&build_runner.jobserver); + build_deps_args(&mut base, build_runner, unit)?; + add_cap_lints(build_runner.bcx, unit, &mut base); + base.args(build_runner.bcx.rustflags_args(unit)); + if build_runner.bcx.gctx.cli_unstable().binary_dep_depinfo { base.arg("-Z").arg("binary-dep-depinfo"); } @@ -677,10 +694,10 @@ fn prepare_rustc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult { - let bcx = cx.bcx; +fn prepare_rustdoc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let bcx = build_runner.bcx; // script_metadata is not needed here, it is only for tests. 
- let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; - rustdoc.inherit_jobserver(&cx.jobserver); + let mut rustdoc = build_runner.compilation.rustdoc_process(unit, None)?; + rustdoc.inherit_jobserver(&build_runner.jobserver); let crate_name = unit.target.crate_name(); rustdoc.arg("--crate-name").arg(&crate_name); add_path_args(bcx.ws, unit, &mut rustdoc); @@ -713,24 +730,24 @@ fn prepare_rustdoc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult { - let mut rustdoc = prepare_rustdoc(cx, unit)?; +fn rustdoc(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let mut rustdoc = prepare_rustdoc(build_runner, unit)?; let crate_name = unit.target.crate_name(); - let doc_dir = cx.files().out_dir(unit); + let doc_dir = build_runner.files().out_dir(unit); // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if // it doesn't already exist. 
@@ -786,30 +803,39 @@ fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { let target_desc = unit.target.description_named(); let name = unit.pkg.name(); - let build_script_outputs = Arc::clone(&cx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let target = Target::clone(&unit.target); - let mut output_options = OutputOptions::new(cx, unit); - let script_metadata = cx.find_build_script_metadata(unit); - let scrape_outputs = if should_include_scrape_units(cx.bcx, unit) { + let mut output_options = OutputOptions::new(build_runner, unit); + let script_metadata = build_runner.find_build_script_metadata(unit); + let scrape_outputs = if should_include_scrape_units(build_runner.bcx, unit) { Some( - cx.bcx + build_runner + .bcx .scrape_units .iter() - .map(|unit| Ok((cx.files().metadata(unit), scrape_output_path(cx, unit)?))) + .map(|unit| { + Ok(( + build_runner.files().metadata(unit), + scrape_output_path(build_runner, unit)?, + )) + }) .collect::>>()?, ) } else { None }; - let failed_scrape_units = Arc::clone(&cx.failed_scrape_units); - let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit) - && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose); + let failed_scrape_units = Arc::clone(&build_runner.failed_scrape_units); + let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit) + && !matches!( + build_runner.bcx.gctx.shell().verbosity(), + Verbosity::Verbose + ); let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| { make_failed_scrape_diagnostic( - cx, + build_runner, unit, format_args!("failed to scan {target_desc} in package `{name}` for example code usage"), ) @@ -898,7 +924,7 @@ fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) { fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: 
&Unit, cmd: &mut ProcessBuilder) { // If this is an upstream dep we don't want warnings from, turn off all // lints. - if !unit.show_warnings(bcx.config) { + if !unit.show_warnings(bcx.gctx) { cmd.arg("--cap-lints").arg("allow"); // If this is an upstream dep but we *do* want warnings, make sure that they @@ -911,8 +937,8 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuild /// Forwards [`-Zallow-features`] if it is set for cargo. /// /// [`-Zallow-features`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#allow-features -fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { - if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { +fn add_allow_features(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) { + if let Some(allow) = &build_runner.bcx.gctx.cli_unstable().allow_features { use std::fmt::Write; let mut arg = String::from("-Zallow-features="); for f in allow { @@ -932,11 +958,11 @@ fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { /// which Cargo will extract and display to the user. /// /// [`--error-format`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--error-format-control-how-errors-are-produced -fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { +fn add_error_format_and_color(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) { cmd.arg("--error-format=json"); let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat"); - match cx.bcx.build_config.message_format { + match build_runner.bcx.build_config.message_format { MessageFormat::Short | MessageFormat::Json { short: true, .. 
} => { json.push_str(",diagnostic-short"); } @@ -944,17 +970,21 @@ fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { } cmd.arg(json); - let config = cx.bcx.config; - if let Some(width) = config.shell().err_width().diagnostic_terminal_width() { + let gctx = build_runner.bcx.gctx; + if let Some(width) = gctx.shell().err_width().diagnostic_terminal_width() { cmd.arg(format!("--diagnostic-width={width}")); } } /// Adds essential rustc flags and environment variables to the command to execute. -fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) -> CargoResult<()> { +fn build_base_args( + build_runner: &BuildRunner<'_, '_>, + cmd: &mut ProcessBuilder, + unit: &Unit, +) -> CargoResult<()> { assert!(!unit.mode.is_run_custom_build()); - let bcx = cx.bcx; + let bcx = build_runner.bcx; let Profile { ref opt_level, codegen_backend, @@ -979,8 +1009,8 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) edition.cmd_edition_arg(cmd); add_path_args(bcx.ws, unit, cmd); - add_error_format_and_color(cx, cmd); - add_allow_features(cx, cmd); + add_error_format_and_color(build_runner, cmd); + add_allow_features(build_runner, cmd); let mut contains_dy_lib = false; if !test { @@ -1002,7 +1032,7 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (contains_dy_lib && !cx.is_primary_package(unit)); + || (contains_dy_lib && !build_runner.is_primary_package(unit)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } @@ -1015,7 +1045,7 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) cmd.arg("-C").arg(format!("panic={}", panic)); } - cmd.args(<o_args(cx, unit)); + cmd.args(<o_args(build_runner, unit)); if let Some(backend) = codegen_backend { cmd.arg("-Z").arg(&format!("codegen-backend={}", backend)); @@ -1036,7 +1066,7 @@ fn build_base_args(cx: 
&Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) // Windows the only stable valid value for split-debuginfo is "packed", // while on Linux "unpacked" is also stable. if let Some(split) = split_debuginfo { - if cx + if build_runner .bcx .target_data .info(unit.kind) @@ -1048,12 +1078,12 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) } if let Some(trim_paths) = trim_paths { - trim_paths_args(cmd, cx, unit, &trim_paths)?; + trim_paths_args(cmd, build_runner, unit, &trim_paths)?; } cmd.args(unit.pkg.manifest().lint_rustflags()); cmd.args(&profile_rustflags); - if let Some(args) = cx.bcx.extra_args_for(unit) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { cmd.args(args); } @@ -1096,11 +1126,11 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) } cmd.args(&features_args(unit)); - cmd.args(&check_cfg_args(cx, unit)); + cmd.args(&check_cfg_args(build_runner, unit)); - let meta = cx.files().metadata(unit); + let meta = build_runner.files().metadata(unit); cmd.arg("-C").arg(&format!("metadata={}", meta)); - if cx.files().use_extra_filename(unit) { + if build_runner.files().use_extra_filename(unit) { cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); } @@ -1108,7 +1138,8 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) cmd.arg("-C").arg("rpath"); } - cmd.arg("--out-dir").arg(&cx.files().out_dir(unit)); + cmd.arg("--out-dir") + .arg(&build_runner.files().out_dir(unit)); fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) { if let Some(val) = val { @@ -1126,13 +1157,18 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) cmd, "-C", "linker=", - cx.compilation + build_runner + .compilation .target_linker(unit.kind) .as_ref() .map(|s| s.as_ref()), ); if incremental { - let dir = cx.files().layout(unit.kind).incremental().as_os_str(); + let dir = build_runner + .files() + .layout(unit.kind) + 
.incremental() + .as_os_str(); opt(cmd, "-C", "incremental=", Some(dir)); } @@ -1161,9 +1197,11 @@ fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) .iter() .filter(|target| target.is_bin()) { - let exe_path = cx - .files() - .bin_link_for_target(bin_target, unit.kind, cx.bcx)?; + let exe_path = build_runner.files().bin_link_for_target( + bin_target, + unit.kind, + build_runner.bcx, + )?; let name = bin_target .binary_filename() .unwrap_or(bin_target.name().to_string()); @@ -1193,7 +1231,7 @@ fn features_args(unit: &Unit) -> Vec { /// [`-Ztrim-paths`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-trim-paths-option fn trim_paths_args( cmd: &mut ProcessBuilder, - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, trim_paths: &TomlTrimPaths, ) -> CargoResult<()> { @@ -1206,7 +1244,7 @@ fn trim_paths_args( cmd.arg(format!("-Zremap-path-scope={trim_paths}")); let sysroot_remap = { - let sysroot = &cx.bcx.target_data.info(unit.kind).sysroot; + let sysroot = &build_runner.bcx.target_data.info(unit.kind).sysroot; let mut remap = OsString::from("--remap-path-prefix="); remap.push(sysroot); remap.push("/lib/rustlib/src/rust"); // See also `detect_sysroot_src_path()`. @@ -1214,16 +1252,16 @@ fn trim_paths_args( remap.push("/rustc/"); // This remap logic aligns with rustc: // - if let Some(commit_hash) = cx.bcx.rustc().commit_hash.as_ref() { + if let Some(commit_hash) = build_runner.bcx.rustc().commit_hash.as_ref() { remap.push(commit_hash); } else { - remap.push(cx.bcx.rustc().version.to_string()); + remap.push(build_runner.bcx.rustc().version.to_string()); } remap }; let package_remap = { let pkg_root = unit.pkg.root(); - let ws_root = cx.bcx.ws.root(); + let ws_root = build_runner.bcx.ws.root(); let mut remap = OsString::from("--remap-path-prefix="); // Remap rules for dependencies // @@ -1234,10 +1272,22 @@ fn trim_paths_args( // * otherwise remapped to `-`. 
let source_id = unit.pkg.package_id().source_id(); if source_id.is_git() { - remap.push(cx.bcx.config.git_checkouts_path().as_path_unlocked()); + remap.push( + build_runner + .bcx + .gctx + .git_checkouts_path() + .as_path_unlocked(), + ); remap.push("="); } else if source_id.is_registry() { - remap.push(cx.bcx.config.registry_source_path().as_path_unlocked()); + remap.push( + build_runner + .bcx + .gctx + .registry_source_path() + .as_path_unlocked(), + ); remap.push("="); } else if pkg_root.strip_prefix(ws_root).is_ok() { remap.push(ws_root); @@ -1264,8 +1314,8 @@ fn trim_paths_args( /// See unstable feature [`check-cfg`]. /// /// [`check-cfg`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg -fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { - if cx.bcx.config.cli_unstable().check_cfg { +fn check_cfg_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec { + if build_runner.bcx.gctx.cli_unstable().check_cfg { // The routine below generates the --check-cfg arguments. Our goals here are to // enable the checking of conditionals and pass the list of declared features. // @@ -1315,13 +1365,13 @@ fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { } /// Adds LTO related codegen flags. 
-fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { +fn lto_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec { let mut result = Vec::new(); let mut push = |arg: &str| { result.push(OsString::from("-C")); result.push(OsString::from(arg)); }; - match cx.lto[unit] { + match build_runner.lto[unit] { lto::Lto::Run(None) => push("lto"), lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), lto::Lto::Off => { @@ -1340,11 +1390,15 @@ fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { /// /// [`-L`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#-l-add-a-directory-to-the-library-search-path /// [`--extern`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--extern-specify-where-an-external-library-is-located -fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let bcx = cx.bcx; +fn build_deps_args( + cmd: &mut ProcessBuilder, + build_runner: &BuildRunner<'_, '_>, + unit: &Unit, +) -> CargoResult<()> { + let bcx = build_runner.bcx; cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); - deps.push(cx.files().deps_dir(unit)); + deps.push(build_runner.files().deps_dir(unit)); deps }); @@ -1353,12 +1407,12 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context<'_, '_>, unit: &Unit) if !unit.kind.is_host() { cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); - deps.push(cx.files().host_deps()); + deps.push(build_runner.files().host_deps()); deps }); } - let deps = cx.unit_deps(unit); + let deps = build_runner.unit_deps(unit); // If there is not one linkable target but should, rustc fails later // on if there is an `extern crate` for it. 
This may turn into a hard @@ -1370,7 +1424,7 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context<'_, '_>, unit: &Unit) if let Some(dep) = deps.iter().find(|dep| { !dep.unit.mode.is_doc() && dep.unit.target.is_lib() && !dep.unit.artifact.is_true() }) { - bcx.config.shell().warn(format!( + bcx.gctx.shell().warn(format!( "The package `{}` \ provides no linkable target. The compiler might raise an error while compiling \ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ @@ -1386,15 +1440,18 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context<'_, '_>, unit: &Unit) for dep in deps { if dep.unit.mode.is_run_custom_build() { - cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit)); + cmd.env( + "OUT_DIR", + &build_runner.files().build_script_out_dir(&dep.unit), + ); } } - for arg in extern_args(cx, unit, &mut unstable_opts)? { + for arg in extern_args(build_runner, unit, &mut unstable_opts)? { cmd.arg(arg); } - for (var, env) in artifact::get_env(cx, deps)? { + for (var, env) in artifact::get_env(build_runner, deps)? { cmd.env(&var, env); } @@ -1437,12 +1494,12 @@ fn add_custom_flags( /// Generates a list of `--extern` arguments. pub fn extern_args( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, unstable_opts: &mut bool, ) -> CargoResult> { let mut result = Vec::new(); - let deps = cx.unit_deps(unit); + let deps = build_runner.unit_deps(unit); // Closure to add one dependency to `result`. let mut link_to = @@ -1479,9 +1536,9 @@ pub fn extern_args( result.push(value); }; - let outputs = cx.outputs(&dep.unit)?; + let outputs = build_runner.outputs(&dep.unit)?; - if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { + if build_runner.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { // Example: rlib dependency for an rlib, rmeta is all that is required. 
let output = outputs .iter() @@ -1545,13 +1602,13 @@ struct OutputOptions { } impl OutputOptions { - fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { - let path = cx.files().message_cache_path(unit); + fn new(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OutputOptions { + let path = build_runner.files().message_cache_path(unit); // Remove old cache, ignore ENOENT, which is the common case. drop(fs::remove_file(&path)); let cache_cell = Some((path, LazyCell::new())); OutputOptions { - format: cx.bcx.build_config.message_format, + format: build_runner.bcx.build_config.message_format, cache_cell, show_diagnostics: true, warnings_seen: 0, @@ -1852,15 +1909,15 @@ fn descriptive_pkg_name(name: &str, target: &Target, mode: &CompileMode) -> Stri } /// Applies environment variables from config `[env]` to [`ProcessBuilder`]. -fn apply_env_config(config: &crate::Config, cmd: &mut ProcessBuilder) -> CargoResult<()> { - for (key, value) in config.env_config()?.iter() { +fn apply_env_config(gctx: &crate::GlobalContext, cmd: &mut ProcessBuilder) -> CargoResult<()> { + for (key, value) in gctx.env_config()?.iter() { // never override a value that has already been set by cargo if cmd.get_envs().contains_key(key) { continue; } - if value.is_force() || config.get_env_os(key).is_none() { - cmd.env(key, value.resolve(config)); + if value.is_force() || gctx.get_env_os(key).is_none() { + cmd.env(key, value.resolve(gctx)); } } Ok(()) @@ -1872,7 +1929,9 @@ fn should_include_scrape_units(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool } /// Gets the file path of function call information output from `rustdoc`. 
-fn scrape_output_path(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult { +fn scrape_output_path(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { assert!(unit.mode.is_doc() || unit.mode.is_doc_scrape()); - cx.outputs(unit).map(|outputs| outputs[0].path.clone()) + build_runner + .outputs(unit) + .map(|outputs| outputs[0].path.clone()) } diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index db98adf92bb..11d320c6a8c 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -6,7 +6,7 @@ use std::collections::{BTreeSet, HashSet}; use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; -use super::{fingerprint, Context, FileFlavor, Unit}; +use super::{fingerprint, BuildRunner, FileFlavor, Unit}; use crate::util::{internal, CargoResult}; use cargo_util::paths; use tracing::debug; @@ -43,7 +43,7 @@ fn render_filename>(path: P, basedir: Option<&str>) -> CargoResul /// [fingerprint dep-info]: super::fingerprint#fingerprint-dep-info-files fn add_deps_for_unit( deps: &mut BTreeSet, - cx: &mut Context<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, visited: &mut HashSet, ) -> CargoResult<()> { @@ -55,10 +55,12 @@ fn add_deps_for_unit( // generate a dep info file, so we just keep on going below if !unit.mode.is_run_custom_build() { // Add dependencies from rustc dep-info output (stored in fingerprint directory) - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); - if let Some(paths) = - fingerprint::parse_dep_info(unit.pkg.root(), cx.files().host_root(), &dep_info_loc)? - { + let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit); + if let Some(paths) = fingerprint::parse_dep_info( + unit.pkg.root(), + build_runner.files().host_root(), + &dep_info_loc, + )? 
{ for path in paths.files { deps.insert(path); } @@ -73,8 +75,13 @@ fn add_deps_for_unit( } // Add rerun-if-changed dependencies - if let Some(metadata) = cx.find_build_script_metadata(unit) { - if let Some(output) = cx.build_script_outputs.lock().unwrap().get(metadata) { + if let Some(metadata) = build_runner.find_build_script_metadata(unit) { + if let Some(output) = build_runner + .build_script_outputs + .lock() + .unwrap() + .get(metadata) + { for path in &output.rerun_if_changed { // The paths we have saved from the unit are of arbitrary relativeness and may be // relative to the crate root of the dependency. @@ -85,10 +92,10 @@ fn add_deps_for_unit( } // Recursively traverse all transitive dependencies - let unit_deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. + let unit_deps = Vec::from(build_runner.unit_deps(unit)); // Create vec due to mutable borrow. for dep in unit_deps { if dep.unit.is_local() { - add_deps_for_unit(deps, cx, &dep.unit, visited)?; + add_deps_for_unit(deps, build_runner, &dep.unit, visited)?; } } Ok(()) @@ -111,16 +118,16 @@ fn add_deps_for_unit( /// `Cargo.lock`. 
/// /// [`fingerprint`]: super::fingerprint#dep-info-files -pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let bcx = cx.bcx; +pub fn output_depinfo(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> { + let bcx = build_runner.bcx; let mut deps = BTreeSet::new(); let mut visited = HashSet::new(); - let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok(); + let success = add_deps_for_unit(&mut deps, build_runner, unit, &mut visited).is_ok(); let basedir_string; - let basedir = match bcx.config.build_config()?.dep_info_basedir.clone() { + let basedir = match bcx.gctx.build_config()?.dep_info_basedir.clone() { Some(value) => { basedir_string = value - .resolve_path(bcx.config) + .resolve_path(bcx.gctx) .as_os_str() .to_str() .ok_or_else(|| anyhow::format_err!("build.dep-info-basedir path not utf-8"))? @@ -134,7 +141,7 @@ pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> .map(|f| render_filename(f, basedir)) .collect::>>()?; - for output in cx + for output in build_runner .outputs(unit)? .iter() .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) diff --git a/src/cargo/core/compiler/rustdoc.rs b/src/cargo/core/compiler/rustdoc.rs index 81ea034c5fc..be6093fd553 100644 --- a/src/cargo/core/compiler/rustdoc.rs +++ b/src/cargo/core/compiler/rustdoc.rs @@ -1,6 +1,6 @@ //! Utilities for building with rustdoc. 
-use crate::core::compiler::context::Context; +use crate::core::compiler::build_runner::BuildRunner; use crate::core::compiler::unit::Unit; use crate::core::compiler::{BuildContext, CompileKind}; use crate::sources::CRATES_IO_REGISTRY; @@ -108,23 +108,23 @@ impl hash::Hash for RustdocExternMap { /// [1]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html#--extern-html-root-url-control-how-rustdoc-links-to-non-local-crates /// [2]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map pub fn add_root_urls( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, rustdoc: &mut ProcessBuilder, ) -> CargoResult<()> { - let config = cx.bcx.config; - if !config.cli_unstable().rustdoc_map { + let gctx = build_runner.bcx.gctx; + if !gctx.cli_unstable().rustdoc_map { tracing::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag"); return Ok(()); } - let map = config.doc_extern_map()?; + let map = gctx.doc_extern_map()?; let mut unstable_opts = false; // Collect mapping of registry name -> index url. 
let name2url: HashMap<&String, Url> = map .registries .keys() .filter_map(|name| { - if let Ok(index_url) = config.get_registry_index(name) { + if let Ok(index_url) = gctx.get_registry_index(name) { Some((name, index_url)) } else { tracing::warn!( @@ -135,7 +135,7 @@ pub fn add_root_urls( } }) .collect(); - for dep in cx.unit_deps(unit) { + for dep in build_runner.unit_deps(unit) { if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { for (registry, location) in &map.registries { let sid = dep.unit.pkg.package_id().source_id(); @@ -172,7 +172,7 @@ pub fn add_root_urls( let std_url = match &map.std { None | Some(RustdocExternMode::Remote) => None, Some(RustdocExternMode::Local) => { - let sysroot = &cx.bcx.target_data.info(CompileKind::Host).sysroot; + let sysroot = &build_runner.bcx.target_data.info(CompileKind::Host).sysroot; let html_root = sysroot.join("share").join("doc").join("rust").join("html"); if html_root.exists() { let url = Url::from_file_path(&html_root).map_err(|()| { @@ -211,12 +211,12 @@ pub fn add_root_urls( /// /// [1]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html?highlight=output-format#-w--output-format-output-format pub fn add_output_format( - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, rustdoc: &mut ProcessBuilder, ) -> CargoResult<()> { - let config = cx.bcx.config; - if !config.cli_unstable().unstable_options { + let gctx = build_runner.bcx.gctx; + if !gctx.cli_unstable().unstable_options { tracing::debug!("`unstable-options` is ignored, required -Zunstable-options flag"); return Ok(()); } diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs index b76c395b87e..9f96ca27d4c 100644 --- a/src/cargo/core/compiler/standard_lib.rs +++ b/src/cargo/core/compiler/standard_lib.rs @@ -9,7 +9,7 @@ use crate::core::resolver::HasDevUnits; use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; use crate::ops::{self, Packages}; use 
crate::util::errors::CargoResult; -use crate::Config; +use crate::GlobalContext; use std::collections::{HashMap, HashSet}; use std::path::PathBuf; @@ -33,8 +33,8 @@ pub fn parse_unstable_flag(value: Option<&str>) -> Vec { crates.into_iter().map(|s| s.to_string()).collect() } -pub(crate) fn std_crates(config: &Config, units: Option<&[Unit]>) -> Option> { - let crates = config.cli_unstable().build_std.as_ref()?.clone(); +pub(crate) fn std_crates(gctx: &GlobalContext, units: Option<&[Unit]>) -> Option> { + let crates = gctx.cli_unstable().build_std.as_ref()?.clone(); // Only build libtest if it looks like it is needed. let mut crates = crates.clone(); @@ -60,14 +60,14 @@ pub(crate) fn std_crates(config: &Config, units: Option<&[Unit]>) -> Option( - ws: &Workspace<'cfg>, - target_data: &mut RustcTargetData<'cfg>, +pub fn resolve_std<'gctx>( + ws: &Workspace<'gctx>, + target_data: &mut RustcTargetData<'gctx>, build_config: &BuildConfig, crates: &[String], -) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> { +) -> CargoResult<(PackageSet<'gctx>, Resolve, ResolvedFeatures)> { if build_config.build_plan { - ws.config() + ws.gctx() .shell() .warn("-Zbuild-std does not currently fully support --build-plan")?; } @@ -111,7 +111,7 @@ pub fn resolve_std<'cfg>( None, ); - let config = ws.config(); + let gctx = ws.gctx(); // This is a delicate hack. In order for features to resolve correctly, // the resolver needs to run a specific "current" member of the workspace. // Thus, in order to set the features for `std`, we need to set `sysroot` @@ -123,7 +123,7 @@ pub fn resolve_std<'cfg>( let current_manifest = src_path.join("library/sysroot/Cargo.toml"); // TODO: Consider doing something to enforce --locked? Or to prevent the // lock file from being written, such as setting ephemeral. 
- let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?; + let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, gctx)?; // Don't require optional dependencies in this workspace, aka std's own // `[dev-dependencies]`. No need for us to generate a `Resolve` which has // those included because we'll never use them anyway. @@ -134,7 +134,7 @@ pub fn resolve_std<'cfg>( spec_pkgs.push("sysroot".to_string()); let spec = Packages::Packages(spec_pkgs); let specs = spec.to_package_id_specs(&std_ws)?; - let features = match &config.cli_unstable().build_std_features { + let features = match &gctx.cli_unstable().build_std_features { Some(list) => list.clone(), None => vec![ "panic-unwind".to_string(), @@ -221,7 +221,7 @@ pub fn generate_std_roots( } fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { - if let Some(s) = target_data.config.get_env_os("__CARGO_TESTS_ONLY_SRC_ROOT") { + if let Some(s) = target_data.gctx.get_env_os("__CARGO_TESTS_ONLY_SRC_ROOT") { return Ok(s.into()); } @@ -240,7 +240,7 @@ fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { anyhow::bail!("{} --toolchain {}", msg, rustup_toolchain); } diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index edd89a4635e..b4b23b6590b 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -4,12 +4,12 @@ //! long it takes for different units to compile. 
use super::{CompileMode, Unit}; use crate::core::compiler::job_queue::JobId; -use crate::core::compiler::{BuildContext, Context, TimingOutput}; +use crate::core::compiler::{BuildContext, BuildRunner, TimingOutput}; use crate::core::PackageId; use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; use crate::util::style; -use crate::util::{CargoResult, Config}; +use crate::util::{CargoResult, GlobalContext}; use anyhow::Context as _; use cargo_util::paths; use std::collections::HashMap; @@ -24,8 +24,8 @@ use std::time::{Duration, Instant, SystemTime}; /// receives messages from spawned off threads. /// /// [`JobQueue`]: super::JobQueue -pub struct Timings<'cfg> { - config: &'cfg Config, +pub struct Timings<'gctx> { + gctx: &'gctx GlobalContext, /// Whether or not timings should be captured. enabled: bool, /// If true, saves an HTML report to disk. @@ -95,8 +95,8 @@ struct Concurrency { inactive: usize, } -impl<'cfg> Timings<'cfg> { - pub fn new(bcx: &BuildContext<'_, 'cfg>, root_units: &[Unit]) -> Timings<'cfg> { +impl<'gctx> Timings<'gctx> { + pub fn new(bcx: &BuildContext<'_, 'gctx>, root_units: &[Unit]) -> Timings<'gctx> { let has_report = |what| bcx.build_config.timing_outputs.contains(&what); let report_html = has_report(TimingOutput::Html); let report_json = has_report(TimingOutput::Json); @@ -132,11 +132,11 @@ impl<'cfg> Timings<'cfg> { }; Timings { - config: bcx.config, + gctx: bcx.gctx, enabled, report_html, report_json, - start: bcx.config.creation_time(), + start: bcx.gctx.creation_time(), start_str, root_targets, profile, @@ -229,7 +229,7 @@ impl<'cfg> Timings<'cfg> { rmeta_time: unit_time.rmeta_time, } .to_json_string(); - crate::drop_println!(self.config, "{}", msg); + crate::drop_println!(self.gctx, "{}", msg); } self.unit_times.push(unit_time); } @@ -288,7 +288,7 @@ impl<'cfg> Timings<'cfg> { /// Call this when all units are finished. 
pub fn finished( &mut self, - cx: &Context<'_, '_>, + build_runner: &BuildRunner<'_, '_>, error: &Option, ) -> CargoResult<()> { if !self.enabled { @@ -298,17 +298,21 @@ impl<'cfg> Timings<'cfg> { self.unit_times .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { - self.report_html(cx, error) + self.report_html(build_runner, error) .with_context(|| "failed to save timing report")?; } Ok(()) } /// Save HTML report to disk. - fn report_html(&self, cx: &Context<'_, '_>, error: &Option) -> CargoResult<()> { + fn report_html( + &self, + build_runner: &BuildRunner<'_, '_>, + error: &Option, + ) -> CargoResult<()> { let duration = self.start.elapsed().as_secs_f64(); let timestamp = self.start_str.replace(&['-', ':'][..], ""); - let timings_path = cx.files().host_root().join("cargo-timings"); + let timings_path = build_runner.files().host_root().join("cargo-timings"); paths::create_dir_all(&timings_path)?; let filename = timings_path.join(format!("cargo-timing-{}.html", timestamp)); let mut f = BufWriter::new(paths::create(&filename)?); @@ -318,7 +322,7 @@ impl<'cfg> Timings<'cfg> { .map(|(name, _targets)| name.as_str()) .collect(); f.write_all(HTML_TMPL.replace("{ROOTS}", &roots.join(", ")).as_bytes())?; - self.write_summary_table(&mut f, duration, cx.bcx, error)?; + self.write_summary_table(&mut f, duration, build_runner.bcx, error)?; f.write_all(HTML_CANVAS.as_bytes())?; self.write_unit_table(&mut f)?; // It helps with pixel alignment to use whole numbers. 
@@ -343,7 +347,7 @@ impl<'cfg> Timings<'cfg> { let unstamped_filename = timings_path.join("cargo-timing.html"); paths::link_or_copy(&filename, &unstamped_filename)?; - let mut shell = self.config.shell(); + let mut shell = self.gctx.shell(); let timing_path = std::env::current_dir().unwrap_or_default().join(&filename); let link = shell.err_file_hyperlink(&timing_path); let msg = format!( diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index 335564bcd73..4e96584c3e4 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -7,7 +7,7 @@ use crate::core::profiles::Profile; use crate::core::Package; use crate::util::hex::short_hash; use crate::util::interning::InternedString; -use crate::util::Config; +use crate::util::GlobalContext; use std::cell::RefCell; use std::collections::HashSet; use std::fmt; @@ -104,8 +104,8 @@ impl UnitInner { } /// Returns whether or not warnings should be displayed for this unit. - pub fn show_warnings(&self, config: &Config) -> bool { - self.is_local() || config.extra_verbose() + pub fn show_warnings(&self, gctx: &GlobalContext) -> bool { + self.is_local() || gctx.extra_verbose() } } diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index e2a237d339e..9040d40fabb 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -31,18 +31,18 @@ use crate::core::resolver::Resolve; use crate::core::{Dependency, Package, PackageId, PackageSet, Target, TargetKind, Workspace}; use crate::ops::resolve_all_features; use crate::util::interning::InternedString; -use crate::util::Config; +use crate::util::GlobalContext; use crate::CargoResult; const IS_NO_ARTIFACT_DEP: Option<&'static Artifact> = None; /// Collection of stuff used while creating the [`UnitGraph`]. 
-struct State<'a, 'cfg> { - ws: &'a Workspace<'cfg>, - config: &'cfg Config, +struct State<'a, 'gctx> { + ws: &'a Workspace<'gctx>, + gctx: &'gctx GlobalContext, /// Stores the result of building the [`UnitGraph`]. unit_dependencies: UnitGraph, - package_set: &'a PackageSet<'cfg>, + package_set: &'a PackageSet<'gctx>, usr_resolve: &'a Resolve, usr_features: &'a ResolvedFeatures, /// Like `usr_resolve` but for building standard library (`-Zbuild-std`). @@ -53,7 +53,7 @@ struct State<'a, 'cfg> { is_std: bool, /// The mode we are compiling in. Used for preventing from building lib thrice. global_mode: CompileMode, - target_data: &'a RustcTargetData<'cfg>, + target_data: &'a RustcTargetData<'gctx>, profiles: &'a Profiles, interner: &'a UnitInterner, // Units for `-Zrustdoc-scrape-examples`. @@ -81,9 +81,9 @@ impl IsArtifact { /// Then entry point for building a dependency graph of compilation units. /// /// You can find some information for arguments from doc of [`State`]. -pub fn build_unit_dependencies<'a, 'cfg>( - ws: &'a Workspace<'cfg>, - package_set: &'a PackageSet<'cfg>, +pub fn build_unit_dependencies<'a, 'gctx>( + ws: &'a Workspace<'gctx>, + package_set: &'a PackageSet<'gctx>, resolve: &'a Resolve, features: &'a ResolvedFeatures, std_resolve: Option<&'a (Resolve, ResolvedFeatures)>, @@ -91,7 +91,7 @@ pub fn build_unit_dependencies<'a, 'cfg>( scrape_units: &[Unit], std_roots: &HashMap>, global_mode: CompileMode, - target_data: &'a RustcTargetData<'cfg>, + target_data: &'a RustcTargetData<'gctx>, profiles: &'a Profiles, interner: &'a UnitInterner, ) -> CargoResult { @@ -107,7 +107,7 @@ pub fn build_unit_dependencies<'a, 'cfg>( }; let mut state = State { ws, - config: ws.config(), + gctx: ws.gctx(), unit_dependencies: HashMap::new(), package_set, usr_resolve: resolve, @@ -212,9 +212,9 @@ fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> { if unit.target.proc_macro() { // Special-case for proc-macros, which are forced to for-host // 
since they need to link with the proc_macro crate. - UnitFor::new_host_test(state.config, root_compile_kind) + UnitFor::new_host_test(state.gctx, root_compile_kind) } else { - UnitFor::new_test(state.config, root_compile_kind) + UnitFor::new_test(state.gctx, root_compile_kind) } } else if unit.target.is_custom_build() { // This normally doesn't happen, except `clean` aggressively @@ -282,7 +282,7 @@ fn compute_deps( let dep_unit_for = unit_for.with_dependency(unit, dep_lib, unit_for.root_compile_kind()); let start = ret.len(); - if state.config.cli_unstable().dual_proc_macros + if state.gctx.cli_unstable().dual_proc_macros && dep_lib.proc_macro() && !unit.kind.is_host() { @@ -984,7 +984,7 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_>) { } } -impl<'a, 'cfg> State<'a, 'cfg> { +impl<'a, 'gctx> State<'a, 'gctx> { /// Gets `std_resolve` during building std, otherwise `usr_resolve`. fn resolve(&self) -> &'a Resolve { if self.is_std { diff --git a/src/cargo/core/compiler/unit_graph.rs b/src/cargo/core/compiler/unit_graph.rs index a8c56de3b7f..14bab76d65d 100644 --- a/src/cargo/core/compiler/unit_graph.rs +++ b/src/cargo/core/compiler/unit_graph.rs @@ -8,7 +8,7 @@ use crate::core::profiles::{Profile, UnitFor}; use crate::core::{PackageId, Target}; use crate::util::interning::InternedString; use crate::util::CargoResult; -use crate::Config; +use crate::GlobalContext; use std::collections::HashMap; use std::io::Write; @@ -78,7 +78,7 @@ struct SerializedUnitDep { pub fn emit_serialized_unit_graph( root_units: &[Unit], unit_graph: &UnitGraph, - config: &Config, + gctx: &GlobalContext, ) -> CargoResult<()> { let mut units: Vec<(&Unit, &Vec)> = unit_graph.iter().collect(); units.sort_unstable(); @@ -96,7 +96,7 @@ pub fn emit_serialized_unit_graph( .iter() .map(|unit_dep| { // https://github.com/rust-lang/rust/issues/64260 when stabilized. 
- let (public, noprelude) = if config.nightly_features_allowed { + let (public, noprelude) = if gctx.nightly_features_allowed { (Some(unit_dep.public), Some(unit_dep.noprelude)) } else { (None, None) diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index dd82833a38b..c1d6493ed0a 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -79,7 +79,7 @@ //! [`unstable_cli_options!`]. Flags can take an optional value if you want. //! 2. Update the [`CliUnstable::add`] function to parse the flag. //! 3. Wherever the new functionality is implemented, call -//! [`Config::cli_unstable`] to get an instance of [`CliUnstable`] +//! [`GlobalContext::cli_unstable`] to get an instance of [`CliUnstable`] //! and check if the option has been enabled on the [`CliUnstable`] instance. //! Nightly gating is already handled, so no need to worry about that. //! If warning when feature is used without the gate, be sure to gracefully degrade (with a @@ -112,7 +112,7 @@ //! and summarize it similar to the other entries. Update the rest of the //! documentation to add the new feature. //! -//! [`Config::cli_unstable`]: crate::util::config::Config::cli_unstable +//! [`GlobalContext::cli_unstable`]: crate::util::config::GlobalContext::cli_unstable //! [`fail_if_stable_opt`]: CliUnstable::fail_if_stable_opt //! [`features!`]: macro.features.html //! [`unstable_cli_options!`]: macro.unstable_cli_options.html @@ -130,7 +130,7 @@ use serde::{Deserialize, Serialize}; use crate::core::resolver::ResolveBehavior; use crate::util::errors::CargoResult; use crate::util::indented_lines; -use crate::Config; +use crate::GlobalContext; pub const SEE_CHANNELS: &str = "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ @@ -311,6 +311,7 @@ impl fmt::Display for Edition { } } } + impl FromStr for Edition { type Err = Error; fn from_str(s: &str) -> Result { @@ -401,12 +402,12 @@ macro_rules! 
features { impl Features { fn status(&mut self, feature: &str) -> Option<(&mut bool, &'static Feature)> { if feature.contains("_") { - return None + return None; } let feature = feature.replace("-", "_"); $( if feature == stringify!($feature) { - return Some((&mut self.$feature, Feature::$feature())) + return Some((&mut self.$feature, Feature::$feature())); } )* None @@ -514,15 +515,15 @@ impl Features { /// Creates a new unstable features context. pub fn new( features: &[String], - config: &Config, + gctx: &GlobalContext, warnings: &mut Vec, is_local: bool, ) -> CargoResult { let mut ret = Features::default(); - ret.nightly_features_allowed = config.nightly_features_allowed; + ret.nightly_features_allowed = gctx.nightly_features_allowed; ret.is_local = is_local; for feature in features { - ret.add(feature, config, warnings)?; + ret.add(feature, gctx, warnings)?; ret.activated.push(feature.to_string()); } Ok(ret) @@ -531,7 +532,7 @@ impl Features { fn add( &mut self, feature_name: &str, - config: &Config, + gctx: &GlobalContext, warnings: &mut Vec, ) -> CargoResult<()> { let nightly_features_allowed = self.nightly_features_allowed; @@ -580,7 +581,7 @@ impl Features { see_docs() ), Status::Unstable => { - if let Some(allow) = &config.cli_unstable().allow_features { + if let Some(allow) = &gctx.cli_unstable().allow_features { if !allow.contains(feature_name) { bail!( "the feature `{}` is not in the list of allowed features: [{}]", @@ -1201,7 +1202,7 @@ impl CliUnstable { /// unstable subcommand. 
pub fn fail_if_stable_command( &self, - config: &Config, + gctx: &GlobalContext, command: &str, issue: u32, z_name: &str, @@ -1215,7 +1216,7 @@ impl CliUnstable { information about the `cargo {}` command.", issue, command ); - if config.nightly_features_allowed { + if gctx.nightly_features_allowed { bail!( "the `cargo {command}` command is unstable, pass `-Z {z_name}` \ to enable it\n\ diff --git a/src/cargo/core/gc.rs b/src/cargo/core/gc.rs index 565078ff006..565cc3ec2d2 100644 --- a/src/cargo/core/gc.rs +++ b/src/cargo/core/gc.rs @@ -22,8 +22,8 @@ use crate::core::global_cache_tracker::{self, GlobalCacheTracker}; use crate::ops::CleanContext; use crate::util::cache_lock::{CacheLock, CacheLockMode}; -use crate::{CargoResult, Config}; -use anyhow::{format_err, Context}; +use crate::{CargoResult, GlobalContext}; +use anyhow::{format_err, Context as _}; use serde::Deserialize; use std::time::Duration; @@ -48,11 +48,11 @@ const DEFAULT_AUTO_FREQUENCY: &str = "1 day"; /// /// It should be cheap to call this multiple times (subsequent calls are /// ignored), but try not to abuse that. -pub fn auto_gc(config: &Config) { - if !config.cli_unstable().gc { +pub fn auto_gc(gctx: &GlobalContext) { + if !gctx.cli_unstable().gc { return; } - if !config.network_allowed() { + if !gctx.network_allowed() { // As a conservative choice, auto-gc is disabled when offline. If the // user is indefinitely offline, we don't want to delete things they // may later depend on. 
@@ -60,21 +60,21 @@ pub fn auto_gc(config: &Config) { return; } - if let Err(e) = auto_gc_inner(config) { - if global_cache_tracker::is_silent_error(&e) && !config.extra_verbose() { + if let Err(e) = auto_gc_inner(gctx) { + if global_cache_tracker::is_silent_error(&e) && !gctx.extra_verbose() { tracing::warn!(target: "gc", "failed to auto-clean cache data: {e:?}"); } else { crate::display_warning_with_error( "failed to auto-clean cache data", &e, - &mut config.shell(), + &mut gctx.shell(), ); } } } -fn auto_gc_inner(config: &Config) -> CargoResult<()> { - let _lock = match config.try_acquire_package_cache_lock(CacheLockMode::MutateExclusive)? { +fn auto_gc_inner(gctx: &GlobalContext) -> CargoResult<()> { + let _lock = match gctx.try_acquire_package_cache_lock(CacheLockMode::MutateExclusive)? { Some(lock) => lock, None => { tracing::debug!(target: "gc", "unable to acquire mutate lock, auto gc disabled"); @@ -82,12 +82,12 @@ fn auto_gc_inner(config: &Config) -> CargoResult<()> { } }; // This should not be called when there are pending deferred entries, so check that. - let deferred = config.deferred_global_last_use()?; + let deferred = gctx.deferred_global_last_use()?; debug_assert!(deferred.is_empty()); - let mut global_cache_tracker = config.global_cache_tracker()?; - let mut gc = Gc::new(config, &mut global_cache_tracker)?; - let mut clean_ctx = CleanContext::new(config); - gc.auto(&mut clean_ctx)?; + let mut global_cache_tracker = gctx.global_cache_tracker()?; + let mut gc = Gc::new(gctx, &mut global_cache_tracker)?; + let mut clean_gctx = CleanContext::new(gctx); + gc.auto(&mut clean_gctx)?; Ok(()) } @@ -172,8 +172,8 @@ impl GcOpts { /// Updates the configuration of this [`GcOpts`] to incorporate the /// settings from config. - pub fn update_for_auto_gc(&mut self, config: &Config) -> CargoResult<()> { - let auto_config = config + pub fn update_for_auto_gc(&mut self, gctx: &GlobalContext) -> CargoResult<()> { + let auto_config = gctx .get::>("gc.auto")? 
.unwrap_or_default(); self.update_for_auto_gc_config(&auto_config) @@ -227,8 +227,8 @@ impl GcOpts { /// Garbage collector. /// /// See the module docs at [`crate::core::gc`] for more information on GC. -pub struct Gc<'a, 'config> { - config: &'config Config, +pub struct Gc<'a, 'gctx> { + gctx: &'gctx GlobalContext, global_cache_tracker: &'a mut GlobalCacheTracker, /// A lock on the package cache. /// @@ -236,17 +236,17 @@ pub struct Gc<'a, 'config> { /// be allowed to write to the cache at the same time, or for others to /// read while we are modifying the cache. #[allow(dead_code)] // Held for drop. - lock: CacheLock<'config>, + lock: CacheLock<'gctx>, } -impl<'a, 'config> Gc<'a, 'config> { +impl<'a, 'gctx> Gc<'a, 'gctx> { pub fn new( - config: &'config Config, + gctx: &'gctx GlobalContext, global_cache_tracker: &'a mut GlobalCacheTracker, - ) -> CargoResult> { - let lock = config.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?; + ) -> CargoResult> { + let lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?; Ok(Gc { - config, + gctx, global_cache_tracker, lock, }) @@ -256,12 +256,12 @@ impl<'a, 'config> Gc<'a, 'config> { /// /// This returns immediately without doing work if garbage collection has /// been performed recently (since `gc.auto.frequency`). - fn auto(&mut self, clean_ctx: &mut CleanContext<'config>) -> CargoResult<()> { - if !self.config.cli_unstable().gc { + fn auto(&mut self, clean_gctx: &mut CleanContext<'gctx>) -> CargoResult<()> { + if !self.gctx.cli_unstable().gc { return Ok(()); } let auto_config = self - .config + .gctx .get::>("gc.auto")? 
.unwrap_or_default(); let Some(freq) = parse_frequency( @@ -279,8 +279,8 @@ impl<'a, 'config> Gc<'a, 'config> { } let mut gc_opts = GcOpts::default(); gc_opts.update_for_auto_gc_config(&auto_config)?; - self.gc(clean_ctx, &gc_opts)?; - if !clean_ctx.dry_run { + self.gc(clean_gctx, &gc_opts)?; + if !clean_gctx.dry_run { self.global_cache_tracker.set_last_auto_gc()?; } Ok(()) @@ -289,10 +289,10 @@ impl<'a, 'config> Gc<'a, 'config> { /// Performs garbage collection based on the given options. pub fn gc( &mut self, - clean_ctx: &mut CleanContext<'config>, + clean_gctx: &mut CleanContext<'gctx>, gc_opts: &GcOpts, ) -> CargoResult<()> { - self.global_cache_tracker.clean(clean_ctx, gc_opts)?; + self.global_cache_tracker.clean(clean_gctx, gc_opts)?; // In the future, other gc operations go here, such as target cleaning. Ok(()) } diff --git a/src/cargo/core/global_cache_tracker.rs b/src/cargo/core/global_cache_tracker.rs index 21f9b31c3ca..814d4a5eff5 100644 --- a/src/cargo/core/global_cache_tracker.rs +++ b/src/cargo/core/global_cache_tracker.rs @@ -18,7 +18,7 @@ //! type. //! //! There is a single global [`GlobalCacheTracker`] and -//! [`DeferredGlobalLastUse`] stored in [`Config`]. +//! [`DeferredGlobalLastUse`] stored in [`GlobalContext`]. //! //! The high-level interface for performing garbage collection is defined in //! the [`crate::core::gc`] module. 
The functions there are responsible for @@ -121,8 +121,8 @@ use crate::util::cache_lock::CacheLockMode; use crate::util::interning::InternedString; use crate::util::sqlite::{self, basic_migration, Migration}; use crate::util::{Filesystem, Progress, ProgressStyle}; -use crate::{CargoResult, Config}; -use anyhow::{bail, Context}; +use crate::{CargoResult, GlobalContext}; +use anyhow::{bail, Context as _}; use cargo_util::paths; use rusqlite::{params, Connection, ErrorCode}; use std::collections::{hash_map, HashMap}; @@ -346,16 +346,15 @@ impl GlobalCacheTracker { /// /// The caller is responsible for locking the package cache with /// [`CacheLockMode::DownloadExclusive`] before calling this. - pub fn new(config: &Config) -> CargoResult { - let db_path = Self::db_path(config); + pub fn new(gctx: &GlobalContext) -> CargoResult { + let db_path = Self::db_path(gctx); // A package cache lock is required to ensure only one cargo is // accessing at the same time. If there is concurrent access, we // want to rely on cargo's own "Blocking" system (which can // provide user feedback) rather than blocking inside sqlite // (which by default has a short timeout). - let db_path = - config.assert_package_cache_locked(CacheLockMode::DownloadExclusive, &db_path); - let mut conn = if config.cli_unstable().gc { + let db_path = gctx.assert_package_cache_locked(CacheLockMode::DownloadExclusive, &db_path); + let mut conn = if gctx.cli_unstable().gc { Connection::open(db_path)? } else { // To simplify things (so there aren't checks everywhere for being @@ -371,8 +370,8 @@ impl GlobalCacheTracker { } /// The path to the database. - pub fn db_path(config: &Config) -> Filesystem { - config.home().join(GLOBAL_CACHE_FILENAME) + pub fn db_path(gctx: &GlobalContext) -> Filesystem { + gctx.home().join(GLOBAL_CACHE_FILENAME) } /// Given an encoded registry name, returns its ID. @@ -548,24 +547,28 @@ impl GlobalCacheTracker { } /// Deletes files from the global cache based on the given options. 
- pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> { - self.clean_inner(clean_ctx, gc_opts) + pub fn clean( + &mut self, + clean_gctx: &mut CleanContext<'_>, + gc_opts: &GcOpts, + ) -> CargoResult<()> { + self.clean_inner(clean_gctx, gc_opts) .with_context(|| "failed to clean entries from the global cache") } fn clean_inner( &mut self, - clean_ctx: &mut CleanContext<'_>, + clean_gctx: &mut CleanContext<'_>, gc_opts: &GcOpts, ) -> CargoResult<()> { let _p = crate::util::profile::start("cleaning global cache files"); - let config = clean_ctx.config; + let gctx = clean_gctx.gctx; let base = BasePaths { - index: config.registry_index_path().into_path_unlocked(), - git_db: config.git_db_path().into_path_unlocked(), - git_co: config.git_checkouts_path().into_path_unlocked(), - crate_dir: config.registry_cache_path().into_path_unlocked(), - src: config.registry_source_path().into_path_unlocked(), + index: gctx.registry_index_path().into_path_unlocked(), + git_db: gctx.git_db_path().into_path_unlocked(), + git_co: gctx.git_checkouts_path().into_path_unlocked(), + crate_dir: gctx.registry_cache_path().into_path_unlocked(), + src: gctx.registry_source_path().into_path_unlocked(), }; let now = now(); trace!(target: "gc", "cleaning {gc_opts:?}"); @@ -577,7 +580,7 @@ impl GlobalCacheTracker { Self::sync_db_with_files( &tx, now, - config, + gctx, &base, gc_opts.is_download_cache_size_set(), &mut delete_paths, @@ -654,9 +657,9 @@ impl GlobalCacheTracker { Self::get_registry_items_to_clean_size_both(&tx, max_size, &base, &mut delete_paths)?; } - clean_ctx.remove_paths(&delete_paths)?; + clean_gctx.remove_paths(&delete_paths)?; - if clean_ctx.dry_run { + if clean_gctx.dry_run { tx.rollback()?; } else { tx.commit()?; @@ -706,7 +709,7 @@ impl GlobalCacheTracker { fn sync_db_with_files( conn: &Connection, now: Timestamp, - config: &Config, + gctx: &GlobalContext, base: &BasePaths, sync_size: bool, delete_paths: &mut Vec, @@ -761,7 +764,7 
@@ impl GlobalCacheTracker { Self::populate_untracked( conn, now, - config, + gctx, REGISTRY_INDEX_TABLE, "registry_id", REGISTRY_SRC_TABLE, @@ -771,7 +774,7 @@ impl GlobalCacheTracker { Self::populate_untracked( conn, now, - config, + gctx, GIT_DB_TABLE, "git_id", GIT_CO_TABLE, @@ -783,7 +786,7 @@ impl GlobalCacheTracker { if sync_size { Self::update_null_sizes( conn, - config, + gctx, REGISTRY_INDEX_TABLE, "registry_id", REGISTRY_SRC_TABLE, @@ -791,7 +794,7 @@ impl GlobalCacheTracker { )?; Self::update_null_sizes( conn, - config, + gctx, GIT_DB_TABLE, "git_id", GIT_CO_TABLE, @@ -932,7 +935,7 @@ impl GlobalCacheTracker { fn populate_untracked( conn: &Connection, now: Timestamp, - config: &Config, + gctx: &GlobalContext, id_table_name: &str, id_column_name: &str, table_name: &str, @@ -956,7 +959,7 @@ impl GlobalCacheTracker { VALUES (?1, ?2, ?3, ?4) ON CONFLICT DO NOTHING", ))?; - let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, config); + let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, gctx); // Compute the size of any directory not in the database. for id_name in id_names { let Some(id) = Self::id_from_name(conn, id_table_name, &id_name)? else { @@ -996,7 +999,7 @@ impl GlobalCacheTracker { /// `update_db_for_removed` should be called before this is called. fn update_null_sizes( conn: &Connection, - config: &Config, + gctx: &GlobalContext, parent_table_name: &str, id_column_name: &str, table_name: &str, @@ -1012,7 +1015,7 @@ impl GlobalCacheTracker { let mut update_stmt = conn.prepare_cached(&format!( "UPDATE {table_name} SET size = ?1 WHERE rowid = ?2" ))?; - let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, config); + let mut progress = Progress::with_style("Scanning", ProgressStyle::Ratio, gctx); let rows: Vec<_> = null_stmt .query_map([], |row| { Ok((row.get_unwrap(0), row.get_unwrap(1), row.get_unwrap(2))) @@ -1589,13 +1592,13 @@ impl DeferredGlobalLastUse { /// error. 
/// /// This will log or display a warning to the user. - pub fn save_no_error(&mut self, config: &Config) { - if let Err(e) = self.save_with_config(config) { + pub fn save_no_error(&mut self, gctx: &GlobalContext) { + if let Err(e) = self.save_with_gctx(gctx) { // Because there is an assertion in auto-gc that checks if this is // empty, be sure to clear it so that assertion doesn't fail. self.clear(); if !self.save_err_has_warned { - if is_silent_error(&e) && config.shell().verbosity() != Verbosity::Verbose { + if is_silent_error(&e) && gctx.shell().verbosity() != Verbosity::Verbose { tracing::warn!("failed to save last-use data: {e:?}"); } else { crate::display_warning_with_error( @@ -1604,7 +1607,7 @@ impl DeferredGlobalLastUse { used in its global cache. This information is used for \ automatically removing unused data in the cache.", &e, - &mut config.shell(), + &mut gctx.shell(), ); self.save_err_has_warned = true; } @@ -1612,8 +1615,8 @@ impl DeferredGlobalLastUse { } } - fn save_with_config(&mut self, config: &Config) -> CargoResult<()> { - let mut tracker = config.global_cache_tracker()?; + fn save_with_gctx(&mut self, gctx: &GlobalContext) -> CargoResult<()> { + let mut tracker = gctx.global_cache_tracker()?; self.save(&mut tracker) } diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index ac0bb44f929..9f4eb29ada5 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -20,7 +20,7 @@ use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; use crate::core::{Edition, Feature, Features, WorkspaceConfig}; use crate::util::errors::*; use crate::util::interning::InternedString; -use crate::util::{short_hash, Config, Filesystem}; +use crate::util::{short_hash, Filesystem, GlobalContext}; pub enum EitherManifest { Real(Manifest), @@ -559,10 +559,10 @@ impl Manifest { } // Just a helper function to test out `-Z` flags on Cargo - pub fn print_teapot(&self, config: &Config) { + pub fn print_teapot(&self, 
gctx: &GlobalContext) { if let Some(teapot) = self.im_a_teapot { - if config.cli_unstable().print_im_a_teapot { - crate::drop_println!(config, "im-a-teapot = {}", teapot); + if gctx.cli_unstable().print_im_a_teapot { + crate::drop_println!(gctx, "im-a-teapot = {}", teapot); } } } diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 8675b0ab252..e5ba3a9b369 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -8,7 +8,7 @@ use std::path::{Path, PathBuf}; use std::rc::Rc; use std::time::{Duration, Instant}; -use anyhow::Context; +use anyhow::Context as _; use bytesize::ByteSize; use cargo_util_schemas::manifest::RustVersion; use curl::easy::Easy; @@ -33,7 +33,7 @@ use crate::util::network::http::HttpTimeout; use crate::util::network::retry::{Retry, RetryResult}; use crate::util::network::sleep::SleepTracker; use crate::util::toml::prepare_for_publish; -use crate::util::{self, internal, Config, Progress, ProgressStyle}; +use crate::util::{self, internal, GlobalContext, Progress, ProgressStyle}; pub const MANIFEST_PREAMBLE: &str = "\ # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO @@ -299,10 +299,10 @@ impl hash::Hash for Package { /// /// This is primarily used to convert a set of `PackageId`s to `Package`s. It /// will download as needed, or used the cached download if available. -pub struct PackageSet<'cfg> { +pub struct PackageSet<'gctx> { packages: HashMap>, - sources: RefCell>, - config: &'cfg Config, + sources: RefCell>, + gctx: &'gctx GlobalContext, multi: Multi, /// Used to prevent reusing the PackageSet to download twice. downloading: Cell, @@ -311,17 +311,17 @@ pub struct PackageSet<'cfg> { } /// Helper for downloading crates. -pub struct Downloads<'a, 'cfg> { - set: &'a PackageSet<'cfg>, +pub struct Downloads<'a, 'gctx> { + set: &'a PackageSet<'gctx>, /// When a download is started, it is added to this map. The key is a /// "token" (see `Download::token`). It is removed once the download is /// finished. 
- pending: HashMap, EasyHandle)>, + pending: HashMap, EasyHandle)>, /// Set of packages currently being downloaded. This should stay in sync /// with `pending`. pending_ids: HashSet, /// Downloads that have failed and are waiting to retry again later. - sleeping: SleepTracker<(Download<'cfg>, Easy)>, + sleeping: SleepTracker<(Download<'gctx>, Easy)>, /// The final result of each download. A pair `(token, result)`. This is a /// temporary holding area, needed because curl can report multiple /// downloads at once, but the main loop (`wait`) is written to only @@ -330,7 +330,7 @@ pub struct Downloads<'a, 'cfg> { /// The next ID to use for creating a token (see `Download::token`). next: usize, /// Progress bar. - progress: RefCell>>, + progress: RefCell>>, /// Number of downloads that have successfully finished. downloads_finished: usize, /// Total bytes for all successfully downloaded packages. @@ -365,10 +365,10 @@ pub struct Downloads<'a, 'cfg> { next_speed_check_bytes_threshold: Cell, /// Global filesystem lock to ensure only one Cargo is downloading at a /// time. - _lock: CacheLock<'cfg>, + _lock: CacheLock<'gctx>, } -struct Download<'cfg> { +struct Download<'gctx> { /// The token for this download, used as the key of the `Downloads::pending` map /// and stored in `EasyHandle` as well. token: usize, @@ -398,19 +398,19 @@ struct Download<'cfg> { timed_out: Cell>, /// Logic used to track retrying this download if it's a spurious failure. - retry: Retry<'cfg>, + retry: Retry<'gctx>, } -impl<'cfg> PackageSet<'cfg> { +impl<'gctx> PackageSet<'gctx> { pub fn new( package_ids: &[PackageId], - sources: SourceMap<'cfg>, - config: &'cfg Config, - ) -> CargoResult> { + sources: SourceMap<'gctx>, + gctx: &'gctx GlobalContext, + ) -> CargoResult> { // We've enabled the `http2` feature of `curl` in Cargo, so treat // failures here as fatal as it would indicate a build-time problem. 
let mut multi = Multi::new(); - let multiplexing = config.http_config()?.multiplexing.unwrap_or(true); + let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true); multi .pipelining(false, multiplexing) .with_context(|| "failed to enable multiplexing/pipelining in curl")?; @@ -424,7 +424,7 @@ impl<'cfg> PackageSet<'cfg> { .map(|&id| (id, LazyCell::new())) .collect(), sources: RefCell::new(sources), - config, + gctx, multi, downloading: Cell::new(false), multiplexing, @@ -439,9 +439,9 @@ impl<'cfg> PackageSet<'cfg> { self.packages.values().filter_map(|p| p.borrow()) } - pub fn enable_download<'a>(&'a self) -> CargoResult> { + pub fn enable_download<'a>(&'a self) -> CargoResult> { assert!(!self.downloading.replace(true)); - let timeout = HttpTimeout::new(self.config)?; + let timeout = HttpTimeout::new(self.gctx)?; Ok(Downloads { start: Instant::now(), set: self, @@ -453,7 +453,7 @@ impl<'cfg> PackageSet<'cfg> { progress: RefCell::new(Some(Progress::with_style( "Downloading", ProgressStyle::Ratio, - self.config, + self.gctx, ))), downloads_finished: 0, downloaded_bytes: 0, @@ -464,7 +464,7 @@ impl<'cfg> PackageSet<'cfg> { next_speed_check: Cell::new(Instant::now()), next_speed_check_bytes_threshold: Cell::new(0), _lock: self - .config + .gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?, }) } @@ -479,7 +479,7 @@ impl<'cfg> PackageSet<'cfg> { pub fn get_many(&self, ids: impl IntoIterator) -> CargoResult> { let mut pkgs = Vec::new(); let _lock = self - .config + .gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; let mut downloads = self.enable_download()?; for id in ids { @@ -491,8 +491,8 @@ impl<'cfg> PackageSet<'cfg> { downloads.success = true; drop(downloads); - let mut deferred = self.config.deferred_global_last_use()?; - deferred.save_no_error(self.config); + let mut deferred = self.gctx.deferred_global_last_use()?; + deferred.save_no_error(self.gctx); Ok(pkgs) } @@ -503,7 +503,7 @@ impl<'cfg> PackageSet<'cfg> { 
root_ids: &[PackageId], has_dev_units: HasDevUnits, requested_kinds: &[CompileKind], - target_data: &RustcTargetData<'cfg>, + target_data: &RustcTargetData<'gctx>, force_all_targets: ForceAllTargets, ) -> CargoResult<()> { fn collect_used_deps( @@ -564,7 +564,7 @@ impl<'cfg> PackageSet<'cfg> { /// to instantly abort, or that do not have any libs which results in warnings. pub(crate) fn warn_no_lib_packages_and_artifact_libs_overlapping_deps( &self, - ws: &Workspace<'cfg>, + ws: &Workspace<'gctx>, resolve: &Resolve, root_ids: &[PackageId], has_dev_units: HasDevUnits, @@ -605,7 +605,7 @@ impl<'cfg> PackageSet<'cfg> { .map(|artifact| artifact.is_lib()) .unwrap_or(true) }) { - ws.config().shell().warn(&format!( + ws.gctx().shell().warn(&format!( "{} ignoring invalid dependency `{}` which is missing a lib target", pkg_id, dep.name_in_toml(), @@ -646,16 +646,16 @@ impl<'cfg> PackageSet<'cfg> { .into_iter() } - pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> { + pub fn sources(&self) -> Ref<'_, SourceMap<'gctx>> { self.sources.borrow() } - pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> { + pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'gctx>> { self.sources.borrow_mut() } /// Merge the given set into self. - pub fn add_set(&mut self, set: PackageSet<'cfg>) { + pub fn add_set(&mut self, set: PackageSet<'gctx>) { assert!(!self.downloading.get()); assert!(!set.downloading.get()); for (pkg_id, p_cell) in set.packages { @@ -667,7 +667,7 @@ impl<'cfg> PackageSet<'cfg> { } } -impl<'a, 'cfg> Downloads<'a, 'cfg> { +impl<'a, 'gctx> Downloads<'a, 'gctx> { /// Starts to download the package for the `id` specified. 
/// /// Returns `None` if the package is queued up for download and will @@ -722,7 +722,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { debug!(target: "network", "downloading {} as {}", id, token); assert!(self.pending_ids.insert(id)); - let (mut handle, _timeout) = http_handle_and_timeout(self.set.config)?; + let (mut handle, _timeout) = http_handle_and_timeout(self.set.gctx)?; handle.get(true)?; handle.url(&url)?; handle.follow_location(true)?; // follow redirects @@ -777,10 +777,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { && self.pending.is_empty() && !self.progress.borrow().as_ref().unwrap().is_enabled() { - self.set - .config - .shell() - .status("Downloading", "crates ...")?; + self.set.gctx.shell().status("Downloading", "crates ...")?; } let dl = Download { @@ -794,7 +791,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { current: Cell::new(0), start: Instant::now(), timed_out: Cell::new(None), - retry: Retry::new(self.set.config)?, + retry: Retry::new(self.set.gctx)?, }; self.enqueue(dl, handle)?; self.tick(WhyTick::DownloadStarted)?; @@ -890,10 +887,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { // semblance of progress of how we're downloading crates, and if the // progress bar is enabled this provides a good log of what's happening. 
self.progress.borrow_mut().as_mut().unwrap().clear(); - self.set - .config - .shell() - .status("Downloaded", &dl.descriptor)?; + self.set.gctx.shell().status("Downloaded", &dl.descriptor)?; self.downloads_finished += 1; self.downloaded_bytes += dl.total.get(); @@ -935,7 +929,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { Ok(slot.borrow().unwrap()) } - fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> { + fn enqueue(&mut self, dl: Download<'gctx>, handle: Easy) -> CargoResult<()> { let mut handle = self.set.multi.add(handle)?; let now = Instant::now(); handle.set_token(dl.token)?; @@ -1121,7 +1115,7 @@ enum WhyTick<'a> { Extracting(&'a str), } -impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { +impl<'a, 'gctx> Drop for Downloads<'a, 'gctx> { fn drop(&mut self) { self.set.downloading.set(false); let progress = self.progress.get_mut().take().unwrap(); @@ -1163,7 +1157,7 @@ impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { } // Clear progress before displaying final summary. drop(progress); - drop(self.set.config.shell().status("Downloaded", status)); + drop(self.set.gctx.shell().status("Downloaded", status)); } } diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index 98dea9ece40..701f191d57c 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -30,7 +30,7 @@ use crate::core::{ }; use crate::util::interning::InternedString; use crate::util::toml::validate_profile; -use crate::util::{closest_msg, config, CargoResult, Config}; +use crate::util::{closest_msg, config, CargoResult, GlobalContext}; use anyhow::{bail, Context as _}; use cargo_util_schemas::manifest::TomlTrimPaths; use cargo_util_schemas::manifest::TomlTrimPathsValue; @@ -69,13 +69,13 @@ pub struct Profiles { impl Profiles { pub fn new(ws: &Workspace<'_>, requested_profile: InternedString) -> CargoResult { - let config = ws.config(); - let incremental = match config.get_env_os("CARGO_INCREMENTAL") { + let gctx = ws.gctx(); + let incremental = match 
gctx.get_env_os("CARGO_INCREMENTAL") { Some(v) => Some(v == "1"), - None => config.build_config()?.incremental, + None => gctx.build_config()?.incremental, }; let mut profiles = merge_config_profiles(ws, requested_profile)?; - let rustc_host = ws.config().load_global_rustc(Some(ws))?.host; + let rustc_host = ws.gctx().load_global_rustc(Some(ws))?.host; let mut profile_makers = Profiles { incremental, @@ -87,7 +87,7 @@ impl Profiles { }; let trim_paths_enabled = ws.unstable_features().is_enabled(Feature::trim_paths()) - || config.cli_unstable().trim_paths; + || gctx.cli_unstable().trim_paths; Self::add_root_profiles(&mut profile_makers, &profiles, trim_paths_enabled); // Merge with predefined profiles. @@ -1109,7 +1109,7 @@ impl UnitFor { /// whether `panic=abort` is supported for tests. Historical versions of /// rustc did not support this, but newer versions do with an unstable /// compiler flag. - pub fn new_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor { + pub fn new_test(gctx: &GlobalContext, root_compile_kind: CompileKind) -> UnitFor { UnitFor { host: false, host_features: false, @@ -1117,7 +1117,7 @@ impl UnitFor { // which inherits the panic setting from the dev/release profile // (basically avoid recompiles) but historical defaults required // that we always unwound. - panic_setting: if config.cli_unstable().panic_abort_tests { + panic_setting: if gctx.cli_unstable().panic_abort_tests { PanicSetting::ReadProfile } else { PanicSetting::AlwaysUnwind @@ -1130,8 +1130,8 @@ impl UnitFor { /// This is a special case for unit tests of a proc-macro. /// /// Proc-macro unit tests are forced to be run on the host. 
- pub fn new_host_test(config: &Config, root_compile_kind: CompileKind) -> UnitFor { - let mut unit_for = UnitFor::new_test(config, root_compile_kind); + pub fn new_host_test(gctx: &GlobalContext, root_compile_kind: CompileKind) -> UnitFor { + let mut unit_for = UnitFor::new_test(gctx, root_compile_kind); unit_for.host = true; unit_for.host_features = true; unit_for @@ -1299,7 +1299,7 @@ fn merge_config_profiles( /// Helper for fetching a profile from config. fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult> { let profile: Option> = - ws.config().get(&format!("profile.{}", name))?; + ws.gctx().get(&format!("profile.{}", name))?; let Some(profile) = profile else { return Ok(None); }; @@ -1307,7 +1307,7 @@ fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult, name: &str) -> CargoResult { - config: &'cfg Config, - sources: SourceMap<'cfg>, +pub struct PackageRegistry<'gctx> { + gctx: &'gctx GlobalContext, + sources: SourceMap<'gctx>, // A list of sources which are considered "overrides" which take precedent // when querying for packages. 
@@ -83,7 +83,7 @@ pub struct PackageRegistry<'cfg> { locked: LockedMap, yanked_whitelist: HashSet, - source_config: SourceConfigMap<'cfg>, + source_config: SourceConfigMap<'gctx>, patches: HashMap>, patches_locked: bool, @@ -132,11 +132,11 @@ pub struct LockedPatchDependency { pub alt_package_id: Option, } -impl<'cfg> PackageRegistry<'cfg> { - pub fn new(config: &'cfg Config) -> CargoResult> { - let source_config = SourceConfigMap::new(config)?; +impl<'gctx> PackageRegistry<'gctx> { + pub fn new(gctx: &'gctx GlobalContext) -> CargoResult> { + let source_config = SourceConfigMap::new(gctx)?; Ok(PackageRegistry { - config, + gctx, sources: SourceMap::new(), source_ids: HashMap::new(), overrides: Vec::new(), @@ -149,9 +149,9 @@ impl<'cfg> PackageRegistry<'cfg> { }) } - pub fn get(self, package_ids: &[PackageId]) -> CargoResult> { + pub fn get(self, package_ids: &[PackageId]) -> CargoResult> { trace!("getting packages; sources={}", self.sources.len()); - PackageSet::new(package_ids, self.sources, self.config) + PackageSet::new(package_ids, self.sources, self.gctx) } fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> { @@ -203,17 +203,17 @@ impl<'cfg> PackageRegistry<'cfg> { Ok(()) } - pub fn add_preloaded(&mut self, source: Box) { + pub fn add_preloaded(&mut self, source: Box) { self.add_source(source, Kind::Locked); } - fn add_source(&mut self, source: Box, kind: Kind) { + fn add_source(&mut self, source: Box, kind: Kind) { let id = source.source_id(); self.sources.insert(source); self.source_ids.insert(id, (id, kind)); } - pub fn add_override(&mut self, source: Box) { + pub fn add_override(&mut self, source: Box) { self.overrides.push(source.source_id()); self.add_source(source, Kind::Override); } @@ -311,7 +311,7 @@ impl<'cfg> PackageRegistry<'cfg> { ); if dep.features().len() != 0 || !dep.uses_default_features() { - self.source_config.config().shell().warn(format!( + self.source_config.gctx().shell().warn(format!( "patch for `{}` uses 
the features mechanism. \ default-features and features will not take effect because the patch dependency does not support this mechanism", dep.package_name() @@ -558,7 +558,7 @@ https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html dep.package_name(), boilerplate ); - self.source_config.config().shell().warn(&msg)?; + self.source_config.gctx().shell().warn(&msg)?; return Ok(()); } @@ -571,7 +571,7 @@ https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html dep.package_name(), boilerplate ); - self.source_config.config().shell().warn(&msg)?; + self.source_config.gctx().shell().warn(&msg)?; return Ok(()); } @@ -579,7 +579,7 @@ https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html } } -impl<'cfg> Registry for PackageRegistry<'cfg> { +impl<'gctx> Registry for PackageRegistry<'gctx> { fn query( &mut self, dep: &Dependency, diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs index fba497506d4..a3c57fcbc38 100644 --- a/src/cargo/core/resolver/conflict_cache.rs +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeMap, HashMap, HashSet}; use tracing::trace; use super::types::ConflictMap; -use crate::core::resolver::Context; +use crate::core::resolver::ResolverContext; use crate::core::{Dependency, PackageId}; /// This is a trie for storing a large number of sets designed to @@ -171,7 +171,7 @@ impl ConflictCache { /// one that will allow for the most jump-back. 
pub fn find_conflicting( &self, - cx: &Context, + cx: &ResolverContext, dep: &Dependency, must_contain: Option, ) -> Option<&ConflictMap> { @@ -186,7 +186,7 @@ impl ConflictCache { } out } - pub fn conflicting(&self, cx: &Context, dep: &Dependency) -> Option<&ConflictMap> { + pub fn conflicting(&self, cx: &ResolverContext, dep: &Dependency) -> Option<&ConflictMap> { self.find_conflicting(cx, dep, None) } diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index cfeea209ae1..12660b3ca71 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -15,7 +15,7 @@ use tracing::debug; // risk of being cloned *a lot* so we want to make this as cheap to clone as // possible. #[derive(Clone)] -pub struct Context { +pub struct ResolverContext { pub age: ContextAge, pub activations: Activations, /// list the features that are activated for each package @@ -70,9 +70,9 @@ impl PackageId { } } -impl Context { - pub fn new() -> Context { - Context { +impl ResolverContext { + pub fn new() -> ResolverContext { + ResolverContext { age: 0, resolve_features: im_rc::HashMap::new(), links: im_rc::HashMap::new(), diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 9e8ffd3510c..f91bfa8c30f 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -9,7 +9,7 @@ //! //! This module impl that cache in all the gory details -use crate::core::resolver::context::Context; +use crate::core::resolver::context::ResolverContext; use crate::core::resolver::errors::describe_path_in_context; use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; use crate::core::resolver::{ @@ -223,7 +223,7 @@ impl<'a> RegistryQueryer<'a> { /// next obvious question. 
pub fn build_deps( &mut self, - cx: &Context, + cx: &ResolverContext, parent: Option, candidate: &Summary, opts: &ResolveOpts, diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index cac39d823f9..44a817916e6 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -158,8 +158,8 @@ impl EncodableResolve { let mut checksums = HashMap::new(); let mut version = match self.version { - Some(n @ 5) if ws.config().nightly_features_allowed => { - if ws.config().cli_unstable().next_lockfile_bump { + Some(n @ 5) if ws.gctx().nightly_features_allowed => { + if ws.gctx().cli_unstable().next_lockfile_bump { ResolveVersion::V5 } else { anyhow::bail!("lock file version `{n}` requires `-Znext-lockfile-bump`"); diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index 4ecce02b5f0..c1faac970de 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -4,10 +4,10 @@ use std::task::Poll; use crate::core::{Dependency, PackageId, Registry, Summary}; use crate::sources::source::QueryKind; use crate::util::edit_distance::edit_distance; -use crate::util::{Config, OptVersionReq, VersionExt}; +use crate::util::{GlobalContext, OptVersionReq, VersionExt}; use anyhow::Error; -use super::context::Context; +use super::context::ResolverContext; use super::types::{ConflictMap, ConflictReason}; /// Error during resolution providing a path of `PackageId`s. 
@@ -70,18 +70,19 @@ impl From<(PackageId, ConflictReason)> for ActivateError { } pub(super) fn activation_error( - cx: &Context, + resolver_gctx: &ResolverContext, registry: &mut dyn Registry, parent: &Summary, dep: &Dependency, conflicting_activations: &ConflictMap, candidates: &[Summary], - config: Option<&Config>, + gctx: Option<&GlobalContext>, ) -> ResolveError { let to_resolve_err = |err| { ResolveError::new( err, - cx.parents + resolver_gctx + .parents .path_to_bottom(&parent.package_id()) .into_iter() .map(|(node, _)| node) @@ -93,7 +94,10 @@ pub(super) fn activation_error( if !candidates.is_empty() { let mut msg = format!("failed to select a version for `{}`.", dep.package_name()); msg.push_str("\n ... required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); + msg.push_str(&describe_path_in_context( + resolver_gctx, + &parent.package_id(), + )); msg.push_str("\nversions that meet the requirements `"); msg.push_str(&dep.version_req().to_string()); @@ -137,7 +141,7 @@ pub(super) fn activation_error( msg.push_str("`, but it conflicts with a previous package which links to `"); msg.push_str(link); msg.push_str("` as well:\n"); - msg.push_str(&describe_path_in_context(cx, p)); + msg.push_str(&describe_path_in_context(resolver_gctx, p)); msg.push_str("\nOnly one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. 
"); msg.push_str("Try to adjust your dependencies so that only one package uses the `links = \""); msg.push_str(link); @@ -206,7 +210,7 @@ pub(super) fn activation_error( for (p, r) in &conflicting_activations { if let ConflictReason::Semver = r { msg.push_str("\n\n previously selected "); - msg.push_str(&describe_path_in_context(cx, p)); + msg.push_str(&describe_path_in_context(resolver_gctx, p)); } } } @@ -274,7 +278,10 @@ pub(super) fn activation_error( registry.describe_source(dep.source_id()), ); msg.push_str("required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); + msg.push_str(&describe_path_in_context( + resolver_gctx, + &parent.package_id(), + )); // If we have a pre-release candidate, then that may be what our user is looking for if let Some(pre) = candidates.iter().find(|c| c.version().is_prerelease()) { @@ -356,13 +363,16 @@ pub(super) fn activation_error( } msg.push_str(&format!("location searched: {}\n", dep.source_id())); msg.push_str("required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); + msg.push_str(&describe_path_in_context( + resolver_gctx, + &parent.package_id(), + )); msg }; - if let Some(config) = config { - if config.offline() { + if let Some(gctx) = gctx { + if gctx.offline() { msg.push_str( "\nAs a reminder, you're using offline mode (--offline) \ which can sometimes cause surprising resolution failures, \ @@ -377,7 +387,7 @@ pub(super) fn activation_error( /// Returns String representation of dependency chain for a particular `pkgid` /// within given context. 
-pub(super) fn describe_path_in_context(cx: &Context, id: &PackageId) -> String { +pub(super) fn describe_path_in_context(cx: &ResolverContext, id: &PackageId) -> String { let iter = cx .parents .path_to_bottom(id) diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs index f1c2aebcc98..7bf944b7555 100644 --- a/src/cargo/core/resolver/features.rs +++ b/src/cargo/core/resolver/features.rs @@ -172,7 +172,7 @@ impl FeatureOpts { force_all_targets: ForceAllTargets, ) -> CargoResult { let mut opts = FeatureOpts::default(); - let unstable_flags = ws.config().cli_unstable(); + let unstable_flags = ws.gctx().cli_unstable(); let mut enable = |feat_opts: &Vec| { for opt in feat_opts { match opt.as_ref() { @@ -407,13 +407,13 @@ pub type DiffMap = BTreeMap>; /// /// [`resolve`]: Self::resolve /// [module-level documentation]: crate::core::resolver::features -pub struct FeatureResolver<'a, 'cfg> { - ws: &'a Workspace<'cfg>, - target_data: &'a mut RustcTargetData<'cfg>, +pub struct FeatureResolver<'a, 'gctx> { + ws: &'a Workspace<'gctx>, + target_data: &'a mut RustcTargetData<'gctx>, /// The platforms to build for, requested by the user. requested_targets: &'a [CompileKind], resolve: &'a Resolve, - package_set: &'a PackageSet<'cfg>, + package_set: &'a PackageSet<'gctx>, /// Options that change how the feature resolver operates. opts: FeatureOpts, /// Map of features activated for each package. @@ -441,14 +441,14 @@ pub struct FeatureResolver<'a, 'cfg> { HashMap<(PackageId, FeaturesFor, InternedString), HashSet>, } -impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { +impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { /// Runs the resolution algorithm and returns a new [`ResolvedFeatures`] /// with the result. 
pub fn resolve( - ws: &Workspace<'cfg>, - target_data: &'a mut RustcTargetData<'cfg>, + ws: &Workspace<'gctx>, + target_data: &'a mut RustcTargetData<'gctx>, resolve: &Resolve, - package_set: &'a PackageSet<'cfg>, + package_set: &'a PackageSet<'gctx>, cli_features: &CliFeatures, specs: &[PackageIdSpec], requested_targets: &[CompileKind], @@ -943,7 +943,7 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { let r_features = self.resolve.features(*pkg_id); if !r_features.iter().eq(features.iter()) { crate::drop_eprintln!( - self.ws.config(), + self.ws.gctx(), "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n", pkg_id, dep_kind, diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 5a444bbf274..335a75c6aaa 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -67,12 +67,12 @@ use tracing::{debug, trace}; use crate::core::PackageIdSpec; use crate::core::{Dependency, PackageId, Registry, Summary}; -use crate::util::config::Config; +use crate::util::config::GlobalContext; use crate::util::errors::CargoResult; use crate::util::network::PollExt; use crate::util::profile; -use self::context::Context; +use self::context::ResolverContext; use self::dep_cache::RegistryQueryer; use self::features::RequestedFeatures; use self::types::{ConflictMap, ConflictReason, DepsFrame}; @@ -128,39 +128,40 @@ pub fn resolve( registry: &mut dyn Registry, version_prefs: &VersionPreferences, resolve_version: ResolveVersion, - config: Option<&Config>, + gctx: Option<&GlobalContext>, ) -> CargoResult { let _p = profile::start("resolving"); - let first_version = match config { + let first_version = match gctx { Some(config) if config.cli_unstable().direct_minimal_versions => { Some(VersionOrdering::MinimumVersionsFirst) } _ => None, }; let mut registry = RegistryQueryer::new(registry, replacements, version_prefs); - let cx = loop { - let cx = Context::new(); - let cx = activate_deps_loop(cx, &mut registry, summaries, first_version, config)?; 
+ let resolver_ctx = loop { + let resolver_ctx = ResolverContext::new(); + let resolver_ctx = + activate_deps_loop(resolver_ctx, &mut registry, summaries, first_version, gctx)?; if registry.reset_pending() { - break cx; + break resolver_ctx; } else { registry.registry.block_until_ready()?; } }; let mut cksums = HashMap::new(); - for (summary, _) in cx.activations.values() { + for (summary, _) in resolver_ctx.activations.values() { let cksum = summary.checksum().map(|s| s.to_string()); cksums.insert(summary.package_id(), cksum); } - let graph = cx.graph(); - let replacements = cx.resolve_replacements(®istry); - let features = cx + let graph = resolver_ctx.graph(); + let replacements = resolver_ctx.resolve_replacements(®istry); + let features = resolver_ctx .resolve_features .iter() .map(|(k, v)| (*k, v.iter().cloned().collect())) .collect(); - let summaries = cx + let summaries = resolver_ctx .activations .into_iter() .map(|(_key, (summary, _age))| (summary.package_id(), summary)) @@ -189,12 +190,12 @@ pub fn resolve( /// If all dependencies can be activated and resolved to a version in the /// dependency graph, `cx` is returned. fn activate_deps_loop( - mut cx: Context, + mut resolver_ctx: ResolverContext, registry: &mut RegistryQueryer<'_>, summaries: &[(Summary, ResolveOpts)], first_version: Option, - config: Option<&Config>, -) -> CargoResult { + gctx: Option<&GlobalContext>, +) -> CargoResult { let mut backtrack_stack = Vec::new(); let mut remaining_deps = RemainingDeps::new(); @@ -206,7 +207,7 @@ fn activate_deps_loop( for (summary, opts) in summaries { debug!("initial activation: {}", summary.package_id()); let res = activate( - &mut cx, + &mut resolver_ctx, registry, None, summary.clone(), @@ -243,19 +244,19 @@ fn activate_deps_loop( // If we spend a lot of time here (we shouldn't in most cases) then give // a bit of a visual indicator as to what we're doing. 
- printed.shell_status(config)?; + printed.shell_status(gctx)?; trace!( "{}[{}]>{} {} candidates", parent.name(), - cx.age, + resolver_ctx.age, dep.package_name(), candidates.len() ); let just_here_for_the_error_messages = just_here_for_the_error_messages && past_conflicting_activations - .conflicting(&cx, &dep) + .conflicting(&resolver_ctx, &dep) .is_some(); let mut remaining_candidates = RemainingCandidates::new(&candidates); @@ -277,7 +278,7 @@ fn activate_deps_loop( let mut backtracked = false; loop { - let next = remaining_candidates.next(&mut conflicting_activations, &cx); + let next = remaining_candidates.next(&mut conflicting_activations, &resolver_ctx); let (candidate, has_another) = next.ok_or(()).or_else(|_| { // If we get here then our `remaining_candidates` was just @@ -289,7 +290,7 @@ fn activate_deps_loop( trace!( "{}[{}]>{} -- no candidates", parent.name(), - cx.age, + resolver_ctx.age, dep.package_name() ); @@ -311,7 +312,7 @@ fn activate_deps_loop( if !just_here_for_the_error_messages && !backtracked { past_conflicting_activations.insert(&dep, &conflicting_activations); if let Some(c) = generalize_conflicting( - &cx, + &resolver_ctx, registry, &mut past_conflicting_activations, &parent, @@ -323,7 +324,7 @@ fn activate_deps_loop( } match find_candidate( - &cx, + &resolver_ctx, &mut backtrack_stack, &parent, backtracked, @@ -334,7 +335,7 @@ fn activate_deps_loop( Some((candidate, has_another, frame)) => { // Reset all of our local variables used with the // contents of `frame` to complete our backtrack. 
- cx = frame.context; + resolver_ctx = frame.context; remaining_deps = frame.remaining_deps; remaining_candidates = frame.remaining_candidates; parent = frame.parent; @@ -347,13 +348,13 @@ fn activate_deps_loop( None => { debug!("no candidates found"); Err(errors::activation_error( - &cx, + &resolver_ctx, registry.registry, &parent, &dep, &conflicting_activations, &candidates, - config, + gctx, )) } } @@ -378,7 +379,7 @@ fn activate_deps_loop( // if we can. let backtrack = if has_another { Some(BacktrackFrame { - context: Context::clone(&cx), + context: ResolverContext::clone(&resolver_ctx), remaining_deps: remaining_deps.clone(), remaining_candidates: remaining_candidates.clone(), parent: Summary::clone(&parent), @@ -401,13 +402,13 @@ fn activate_deps_loop( trace!( "{}[{}]>{} trying {}", parent.name(), - cx.age, + resolver_ctx.age, dep.package_name(), candidate.version() ); let first_version = None; // this is an indirect dependency let res = activate( - &mut cx, + &mut resolver_ctx, registry, Some((&parent, &dep)), candidate, @@ -443,7 +444,7 @@ fn activate_deps_loop( .remaining_siblings .clone() .filter_map(|(ref new_dep, _, _)| { - past_conflicting_activations.conflicting(&cx, new_dep) + past_conflicting_activations.conflicting(&resolver_ctx, new_dep) }) .next() { @@ -481,7 +482,7 @@ fn activate_deps_loop( .filter(|(_, other_dep)| known_related_bad_deps.contains(other_dep)) .filter_map(|(other_parent, other_dep)| { past_conflicting_activations - .find_conflicting(&cx, &other_dep, Some(pid)) + .find_conflicting(&resolver_ctx, &other_dep, Some(pid)) .map(|con| (other_parent, con)) }) .next() @@ -522,7 +523,7 @@ fn activate_deps_loop( let activate_for_error_message = has_past_conflicting_dep && !has_another && { just_here_for_the_error_messages || { find_candidate( - &cx, + &resolver_ctx, &mut backtrack_stack.clone(), &parent, backtracked, @@ -557,7 +558,7 @@ fn activate_deps_loop( trace!( "{}[{}]>{} skipping {} ", parent.name(), - cx.age, + resolver_ctx.age, 
dep.package_name(), pid.version() ); @@ -599,7 +600,7 @@ fn activate_deps_loop( // for error messages anyway so we can live with a little // imprecision. if let Some(b) = backtrack { - cx = b.context; + resolver_ctx = b.context; } } @@ -609,7 +610,7 @@ fn activate_deps_loop( // so loop back to the top of the function here. } - Ok(cx) + Ok(resolver_ctx) } /// Attempts to activate the summary `candidate` in the context `cx`. @@ -619,7 +620,7 @@ fn activate_deps_loop( /// If `candidate` was activated, this function returns the dependency frame to /// iterate through next. fn activate( - cx: &mut Context, + cx: &mut ResolverContext, registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Summary, @@ -694,7 +695,7 @@ fn activate( #[derive(Clone)] struct BacktrackFrame { - context: Context, + context: ResolverContext, remaining_deps: RemainingDeps, remaining_candidates: RemainingCandidates, parent: Summary, @@ -748,7 +749,7 @@ impl RemainingCandidates { fn next( &mut self, conflicting_prev_active: &mut ConflictMap, - cx: &Context, + cx: &ResolverContext, ) -> Option<(Summary, bool)> { for b in self.remaining.by_ref() { let b_id = b.package_id(); @@ -804,7 +805,7 @@ impl RemainingCandidates { /// Attempts to find a new conflict that allows a `find_candidate` better then the input one. /// It will add the new conflict to the cache if one is found. fn generalize_conflicting( - cx: &Context, + cx: &ResolverContext, registry: &mut RegistryQueryer<'_>, past_conflicting_activations: &mut conflict_cache::ConflictCache, parent: &Summary, @@ -934,7 +935,7 @@ fn shortcircuit_max(iter: impl Iterator>) -> Option /// Read /// For several more detailed explanations of the logic here. 
fn find_candidate( - cx: &Context, + cx: &ResolverContext, backtrack_stack: &mut Vec, parent: &Summary, backtracked: bool, diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 9ef78090ed2..daaf6b1b638 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -2,7 +2,7 @@ use super::features::{CliFeatures, RequestedFeatures}; use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::Config; +use crate::util::GlobalContext; use std::cmp::Ordering; use std::collections::{BTreeMap, BTreeSet}; use std::ops::Range; @@ -43,7 +43,7 @@ impl ResolverProgress { .unwrap_or(1), } } - pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> { + pub fn shell_status(&mut self, gctx: Option<&GlobalContext>) -> CargoResult<()> { // If we spend a lot of time here (we shouldn't in most cases) then give // a bit of a visual indicator as to what we're doing. Only enable this // when stderr is a tty (a human is likely to be watching) to ensure we @@ -54,7 +54,7 @@ impl ResolverProgress { // like `Instant::now` by only checking every N iterations of this loop // to amortize the cost of the current time lookup. 
self.ticks += 1; - if let Some(config) = config { + if let Some(config) = gctx { if config.shell().is_err_tty() && !self.printed && self.ticks % 1000 == 0 diff --git a/src/cargo/core/source_id.rs b/src/cargo/core/source_id.rs index a197adedcd6..4eb2ad127b3 100644 --- a/src/cargo/core/source_id.rs +++ b/src/cargo/core/source_id.rs @@ -6,8 +6,8 @@ use crate::sources::source::Source; use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::sources::{GitSource, PathSource, RegistrySource}; use crate::util::interning::InternedString; -use crate::util::{config, CanonicalUrl, CargoResult, Config, IntoUrl}; -use anyhow::Context; +use crate::util::{config, CanonicalUrl, CargoResult, GlobalContext, IntoUrl}; +use anyhow::Context as _; use serde::de; use serde::ser; use std::cmp::{self, Ordering}; @@ -249,26 +249,26 @@ impl SourceId { /// /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config.toml`. - pub fn crates_io(config: &Config) -> CargoResult { - config.crates_io_source_id() + pub fn crates_io(gctx: &GlobalContext) -> CargoResult { + gctx.crates_io_source_id() } /// Returns the `SourceId` corresponding to the main repository, using the /// sparse HTTP index if allowed. - pub fn crates_io_maybe_sparse_http(config: &Config) -> CargoResult { - if Self::crates_io_is_sparse(config)? { - config.check_registry_index_not_set()?; + pub fn crates_io_maybe_sparse_http(gctx: &GlobalContext) -> CargoResult { + if Self::crates_io_is_sparse(gctx)? { + gctx.check_registry_index_not_set()?; let url = CRATES_IO_HTTP_INDEX.into_url().unwrap(); let key = KeyOf::Registry(CRATES_IO_REGISTRY.into()); SourceId::new(SourceKind::SparseRegistry, url, Some(key)) } else { - Self::crates_io(config) + Self::crates_io(gctx) } } /// Returns whether to access crates.io over the sparse protocol. 
- pub fn crates_io_is_sparse(config: &Config) -> CargoResult { - let proto: Option> = config.get("registries.crates-io.protocol")?; + pub fn crates_io_is_sparse(gctx: &GlobalContext) -> CargoResult { + let proto: Option> = gctx.get("registries.crates-io.protocol")?; let is_sparse = match proto.as_ref().map(|v| v.val.as_str()) { Some("sparse") => true, Some("git") => false, @@ -282,11 +282,11 @@ impl SourceId { } /// Gets the `SourceId` associated with given name of the remote registry. - pub fn alt_registry(config: &Config, key: &str) -> CargoResult { + pub fn alt_registry(gctx: &GlobalContext, key: &str) -> CargoResult { if key == CRATES_IO_REGISTRY { - return Self::crates_io(config); + return Self::crates_io(gctx); } - let url = config.get_registry_index(key)?; + let url = gctx.get_registry_index(key)?; Self::for_alt_registry(&url, key) } @@ -381,22 +381,22 @@ impl SourceId { /// * `yanked_whitelist` --- Packages allowed to be used, even if they are yanked. pub fn load<'a>( self, - config: &'a Config, + gctx: &'a GlobalContext, yanked_whitelist: &HashSet, ) -> CargoResult> { trace!("loading SourceId; {}", self); match self.inner.kind { - SourceKind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)), + SourceKind::Git(..) 
=> Ok(Box::new(GitSource::new(self, gctx)?)), SourceKind::Path => { let path = self .inner .url .to_file_path() .expect("path sources cannot be remote"); - Ok(Box::new(PathSource::new(&path, self, config))) + Ok(Box::new(PathSource::new(&path, self, gctx))) } SourceKind::Registry | SourceKind::SparseRegistry => Ok(Box::new( - RegistrySource::remote(self, yanked_whitelist, config)?, + RegistrySource::remote(self, yanked_whitelist, gctx)?, )), SourceKind::LocalRegistry => { let path = self @@ -408,7 +408,7 @@ impl SourceId { self, &path, yanked_whitelist, - config, + gctx, ))) } SourceKind::Directory => { @@ -417,7 +417,7 @@ impl SourceId { .url .to_file_path() .expect("path sources cannot be remote"); - Ok(Box::new(DirectorySource::new(&path, self, config))) + Ok(Box::new(DirectorySource::new(&path, self, gctx))) } } } @@ -754,7 +754,7 @@ impl KeyOf { #[cfg(test)] mod tests { use super::{GitReference, SourceId, SourceKind}; - use crate::util::{Config, IntoUrl}; + use crate::util::{GlobalContext, IntoUrl}; #[test] fn github_sources_equal() { @@ -792,8 +792,8 @@ mod tests { #[test] #[cfg(all(target_endian = "little", target_pointer_width = "64"))] fn test_cratesio_hash() { - let config = Config::default().unwrap(); - let crates_io = SourceId::crates_io(&config).unwrap(); + let gctx = GlobalContext::default().unwrap(); + let crates_io = SourceId::crates_io(&gctx).unwrap(); assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index 6bb8188dc3a..eea2d171150 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -25,7 +25,7 @@ use crate::util::edit_distance; use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; use crate::util::toml::{read_manifest, InheritableFields}; -use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl}; +use crate::util::{config::ConfigRelativePath, Filesystem, 
GlobalContext, IntoUrl}; use cargo_util::paths; use cargo_util::paths::normalize_path; use cargo_util_schemas::manifest::RustVersion; @@ -38,8 +38,8 @@ use pathdiff::diff_paths; /// other functions. It's typically through this object that the current /// package is loaded and/or learned about. #[derive(Debug)] -pub struct Workspace<'cfg> { - config: &'cfg Config, +pub struct Workspace<'gctx> { + gctx: &'gctx GlobalContext, // This path is a path to where the current cargo subcommand was invoked // from. That is the `--manifest-path` argument to Cargo, and @@ -48,7 +48,7 @@ pub struct Workspace<'cfg> { // A list of packages found in this workspace. Always includes at least the // package mentioned by `current_manifest`. - packages: Packages<'cfg>, + packages: Packages<'gctx>, // If this workspace includes more than one crate, this points to the root // of the workspace. This is `None` in the case that `[workspace]` is @@ -106,8 +106,8 @@ pub struct Workspace<'cfg> { // Separate structure for tracking loaded packages (to avoid loading anything // twice), and this is separate to help appease the borrow checker. #[derive(Debug)] -struct Packages<'cfg> { - config: &'cfg Config, +struct Packages<'gctx> { + gctx: &'gctx GlobalContext, packages: HashMap, } @@ -180,16 +180,16 @@ pub struct WorkspaceRootConfig { custom_metadata: Option, } -impl<'cfg> Workspace<'cfg> { +impl<'gctx> Workspace<'gctx> { /// Creates a new workspace given the target manifest pointed to by /// `manifest_path`. /// /// This function will construct the entire workspace by determining the /// root and all member packages. It will then validate the workspace /// before returning it, so `Ok` is only returned for valid workspaces. 
- pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult> { - let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config); - ws.target_dir = config.target_dir()?; + pub fn new(manifest_path: &Path, gctx: &'gctx GlobalContext) -> CargoResult> { + let mut ws = Workspace::new_default(manifest_path.to_path_buf(), gctx); + ws.target_dir = gctx.target_dir()?; if manifest_path.is_relative() { bail!( @@ -209,12 +209,12 @@ impl<'cfg> Workspace<'cfg> { Ok(ws) } - fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> { + fn new_default(current_manifest: PathBuf, gctx: &'gctx GlobalContext) -> Workspace<'gctx> { Workspace { - config, + gctx, current_manifest, packages: Packages { - config, + gctx, packages: HashMap::new(), }, root_manifest: None, @@ -235,11 +235,11 @@ impl<'cfg> Workspace<'cfg> { root_path: PathBuf, current_manifest: PathBuf, manifest: VirtualManifest, - config: &'cfg Config, - ) -> CargoResult> { - let mut ws = Workspace::new_default(current_manifest, config); + gctx: &'gctx GlobalContext, + ) -> CargoResult> { + let mut ws = Workspace::new_default(current_manifest, gctx); ws.root_manifest = Some(root_path.join("Cargo.toml")); - ws.target_dir = config.target_dir()?; + ws.target_dir = gctx.target_dir()?; ws.packages .packages .insert(root_path, MaybePackage::Virtual(manifest)); @@ -261,11 +261,11 @@ impl<'cfg> Workspace<'cfg> { /// `cargo package`. 
pub fn ephemeral( package: Package, - config: &'cfg Config, + gctx: &'gctx GlobalContext, target_dir: Option, require_optional_deps: bool, - ) -> CargoResult> { - let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config); + ) -> CargoResult> { + let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), gctx); ws.is_ephemeral = true; ws.require_optional_deps = require_optional_deps; let key = ws.current_manifest.parent().unwrap(); @@ -275,7 +275,7 @@ impl<'cfg> Workspace<'cfg> { ws.target_dir = if let Some(dir) = target_dir { Some(dir) } else { - ws.config.target_dir()? + ws.gctx.target_dir()? }; ws.members.push(ws.current_manifest.clone()); ws.member_ids.insert(id); @@ -349,9 +349,9 @@ impl<'cfg> Workspace<'cfg> { } } - /// Returns the `Config` this workspace is associated with. - pub fn config(&self) -> &'cfg Config { - self.config + /// Returns the `GlobalContext` this workspace is associated with. + pub fn gctx(&self) -> &'gctx GlobalContext { + self.gctx } pub fn profiles(&self) -> Option<&TomlProfiles> { @@ -396,7 +396,7 @@ impl<'cfg> Workspace<'cfg> { rel_path.push(&hash[0..2]); rel_path.push(&hash[2..]); - self.config().home().join(rel_path) + self.gctx().home().join(rel_path) } else { Filesystem::new(self.root().join("target")) } @@ -415,7 +415,7 @@ impl<'cfg> Workspace<'cfg> { fn config_patch(&self) -> CargoResult>> { let config_patch: Option< BTreeMap>>, - > = self.config.get("patch")?; + > = self.gctx.get("patch")?; let source = SourceId::for_path(self.root())?; @@ -427,7 +427,7 @@ impl<'cfg> Workspace<'cfg> { let url = match &url[..] 
{ CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), url => self - .config + .gctx .get_registry_index(url) .or_else(|_| url.into_url()) .with_context(|| { @@ -443,7 +443,7 @@ impl<'cfg> Workspace<'cfg> { name, source, &mut nested_paths, - self.config, + self.gctx, &mut warnings, /* platform */ None, // NOTE: Since we use ConfigRelativePath, this root isn't used as @@ -458,7 +458,7 @@ impl<'cfg> Workspace<'cfg> { } for message in warnings { - self.config + self.gctx .shell() .warn(format!("[patch] in cargo config: {}", message))? } @@ -586,7 +586,7 @@ impl<'cfg> Workspace<'cfg> { pub fn set_require_optional_deps( &mut self, require_optional_deps: bool, - ) -> &mut Workspace<'cfg> { + ) -> &mut Workspace<'gctx> { self.require_optional_deps = require_optional_deps; self } @@ -595,7 +595,7 @@ impl<'cfg> Workspace<'cfg> { self.ignore_lock } - pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'cfg> { + pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'gctx> { self.ignore_lock = ignore_lock; self } @@ -649,7 +649,7 @@ impl<'cfg> Workspace<'cfg> { debug!("find_root - is root {}", manifest_path.display()); Ok(Some(root_path)) } - None => find_workspace_root_with_loader(manifest_path, self.config, |self_path| { + None => find_workspace_root_with_loader(manifest_path, self.gctx, |self_path| { Ok(self .packages .load(self_path)? 
@@ -1001,7 +1001,7 @@ impl<'cfg> Workspace<'cfg> { pkg.manifest_path().display(), root_manifest.display(), ); - self.config.shell().warn(&msg) + self.gctx.shell().warn(&msg) }; if manifest.original().has_profiles() { emit_warning("profiles")?; @@ -1029,16 +1029,16 @@ impl<'cfg> Workspace<'cfg> { .max() { let resolver = edition.default_resolve_behavior().to_manifest(); - self.config.shell().warn(format_args!( + self.gctx.shell().warn(format_args!( "virtual workspace defaulting to `resolver = \"1\"` despite one or more workspace members being on edition {edition} which implies `resolver = \"{resolver}\"`" ))?; - self.config.shell().note( + self.gctx.shell().note( "to keep the current resolver, specify `workspace.resolver = \"1\"` in the workspace root's manifest", )?; - self.config.shell().note(format_args!( + self.gctx.shell().note(format_args!( "to use the edition {edition} resolver, specify `workspace.resolver = \"{resolver}\"` in the workspace root's manifest" ))?; - self.config.shell().note( + self.gctx.shell().note( "for more details see https://doc.rust-lang.org/cargo/reference/resolver.html#resolver-versions", )?; } @@ -1060,7 +1060,7 @@ impl<'cfg> Workspace<'cfg> { return Ok(p); } let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; - let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?; + let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.gctx)?; loaded.insert(manifest_path.to_path_buf(), package.clone()); Ok(package) } @@ -1071,7 +1071,7 @@ impl<'cfg> Workspace<'cfg> { /// for various operations, and this preload step avoids doubly-loading and /// parsing crates on the filesystem by inserting them all into the registry /// with their in-memory formats. - pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { + pub fn preload(&self, registry: &mut PackageRegistry<'gctx>) { // These can get weird as this generally represents a workspace during // `cargo install`. 
Things like git repositories will actually have a // `PathSource` with multiple entries in it, so the logic below is @@ -1086,7 +1086,7 @@ impl<'cfg> Workspace<'cfg> { MaybePackage::Package(ref p) => p.clone(), MaybePackage::Virtual(_) => continue, }; - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.config); + let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.gctx); src.preload_with(pkg); registry.add_preloaded(Box::new(src)); } @@ -1113,7 +1113,7 @@ impl<'cfg> Workspace<'cfg> { // originated, so include the path. format!("{}: {}", path.display(), warning.message) }; - self.config.shell().warn(msg)? + self.gctx.shell().warn(msg)? } } } @@ -1570,7 +1570,7 @@ impl<'cfg> Workspace<'cfg> { } } -impl<'cfg> Packages<'cfg> { +impl<'gctx> Packages<'gctx> { fn get(&self, manifest_path: &Path) -> &MaybePackage { self.maybe_get(manifest_path).unwrap() } @@ -1593,8 +1593,7 @@ impl<'cfg> Packages<'cfg> { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(v) => { let source_id = SourceId::for_path(key)?; - let (manifest, _nested_paths) = - read_manifest(manifest_path, source_id, self.config)?; + let (manifest, _nested_paths) = read_manifest(manifest_path, source_id, self.gctx)?; Ok(v.insert(match manifest { EitherManifest::Real(manifest) => { MaybePackage::Package(Package::new(manifest, manifest_path)) @@ -1737,11 +1736,14 @@ pub fn resolve_relative_path( } /// Finds the path of the root of the workspace. 
-pub fn find_workspace_root(manifest_path: &Path, config: &Config) -> CargoResult> { - find_workspace_root_with_loader(manifest_path, config, |self_path| { +pub fn find_workspace_root( + manifest_path: &Path, + gctx: &GlobalContext, +) -> CargoResult> { + find_workspace_root_with_loader(manifest_path, gctx, |self_path| { let key = self_path.parent().unwrap(); let source_id = SourceId::for_path(key)?; - let (manifest, _nested_paths) = read_manifest(self_path, source_id, config)?; + let (manifest, _nested_paths) = read_manifest(self_path, source_id, gctx)?; Ok(manifest .workspace_config() .get_ws_root(self_path, manifest_path)) @@ -1754,12 +1756,12 @@ pub fn find_workspace_root(manifest_path: &Path, config: &Config) -> CargoResult /// workspace root is. fn find_workspace_root_with_loader( manifest_path: &Path, - config: &Config, + gctx: &GlobalContext, mut loader: impl FnMut(&Path) -> CargoResult>, ) -> CargoResult> { // Check if there are any workspace roots that have already been found that would work { - let roots = config.ws_roots.borrow(); + let roots = gctx.ws_roots.borrow(); // Iterate through the manifests parent directories until we find a workspace // root. Note we skip the first item since that is just the path itself for current in manifest_path.ancestors().skip(1) { @@ -1772,7 +1774,7 @@ fn find_workspace_root_with_loader( } } - for ances_manifest_path in find_root_iter(manifest_path, config) { + for ances_manifest_path in find_root_iter(manifest_path, gctx) { debug!("find_root - trying {}", ances_manifest_path.display()); if let Some(ws_root_path) = loader(&ances_manifest_path)? 
{ return Ok(Some(ws_root_path)); @@ -1793,7 +1795,7 @@ fn read_root_pointer(member_manifest: &Path, root_link: &str) -> PathBuf { fn find_root_iter<'a>( manifest_path: &'a Path, - config: &'a Config, + gctx: &'a GlobalContext, ) -> impl Iterator + 'a { LookBehind::new(paths::ancestors(manifest_path, None).skip(2)) .take_while(|path| !path.curr.ends_with("target/package")) @@ -1804,7 +1806,7 @@ fn find_root_iter<'a>( // crates.io crates into the workspace by accident. .take_while(|path| { if let Some(last) = path.last { - config.home() != last + gctx.home() != last } else { true } diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index d0c65084ced..61fac71540a 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -30,8 +30,8 @@ //! The [`BuildContext`][core::compiler::BuildContext] is the result of the "front end" of the //! build process. This contains the graph of work to perform and any settings necessary for //! `rustc`. After this is built, the next stage of building is handled in -//! [`Context`][core::compiler::Context]. -//! - [`core::compiler::context`]: +//! [`BuildRunner`][core::compiler::BuildRunner]. +//! - [`core::compiler::build_runner`]: //! The `Context` is the mutable state used during the build process. This //! is the core of the build process, and everything is coordinated through //! this. @@ -47,7 +47,7 @@ //! - [`util::config`]: //! This directory contains the config parser. It makes heavy use of //! [serde](https://serde.rs/) to merge and translate config values. The -//! [`util::Config`] is usually accessed from the +//! [`util::GlobalContext`] is usually accessed from the //! [`core::Workspace`] //! though references to it are scattered around for more convenient access. //! - [`util::toml`]: @@ -111,7 +111,7 @@ //! - `target/debug/.fingerprint`: Tracker whether nor not a crate needs to be rebuilt. See [`core::compiler::fingerprint`] //! - `$CARGO_HOME/`: //! - `registry/`: Package registry cache which is managed in [`sources::registry`]. 
Be careful -//! as the lock [`util::Config::acquire_package_cache_lock`] must be manually acquired. +//! as the lock [`util::GlobalContext::acquire_package_cache_lock`] must be manually acquired. //! - `index`/: Fast-to-access crate metadata (no need to download / extract `*.crate` files) //! - `cache/*/*.crate`: Local cache of published crates //! - `src/*/*`: Extracted from `*.crate` by [`sources::registry::RegistrySource`] @@ -143,7 +143,7 @@ use anyhow::Error; use tracing::debug; pub use crate::util::errors::{AlreadyPrintedError, InternalError, VerboseError}; -pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, Config}; +pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, GlobalContext}; pub use crate::version::version; pub const CARGO_ENV: &str = "CARGO"; diff --git a/src/cargo/ops/cargo_add/mod.rs b/src/cargo/ops/cargo_add/mod.rs index 609793efa0f..40a4a302196 100644 --- a/src/cargo/ops/cargo_add/mod.rs +++ b/src/cargo/ops/cargo_add/mod.rs @@ -37,14 +37,14 @@ use crate::util::toml_mut::is_sorted; use crate::util::toml_mut::manifest::DepTable; use crate::util::toml_mut::manifest::LocalManifest; use crate::CargoResult; -use crate::Config; +use crate::GlobalContext; use crate_spec::CrateSpec; /// Information on what dependencies should be added #[derive(Clone, Debug)] pub struct AddOptions<'a> { /// Configuration information for cargo operations - pub config: &'a Config, + pub gctx: &'a GlobalContext, /// Package to add dependencies to pub spec: &'a Package, /// Dependencies to add or modify @@ -77,11 +77,11 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( ); } - let mut registry = PackageRegistry::new(options.config)?; + let mut registry = PackageRegistry::new(options.gctx)?; let deps = { let _lock = options - .config + .gctx .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; registry.lock_patches(); options @@ -95,7 +95,7 @@ pub fn add(workspace: &Workspace<'_>, options: 
&AddOptions<'_>) -> CargoResult<( &options.spec, &options.section, options.honor_rust_version, - options.config, + options.gctx, &mut registry, ) }) @@ -114,7 +114,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( }) }); for dep in deps { - print_action_msg(&mut options.config.shell(), &dep, &dep_table)?; + print_action_msg(&mut options.gctx.shell(), &dep, &dep_table)?; if let Some(Source::Path(src)) = dep.source() { if src.path == manifest.path.parent().unwrap_or_else(|| Path::new("")) { anyhow::bail!( @@ -194,7 +194,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( anyhow::bail!(message.trim().to_owned()); } - print_dep_table_msg(&mut options.config.shell(), &dep)?; + print_dep_table_msg(&mut options.gctx.shell(), &dep)?; manifest.insert_into_table(&dep_table, &dep)?; if dep.optional == Some(true) { @@ -209,7 +209,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( [format!("dep:{dep_name}")].iter().collect(); table[dep_key] = toml_edit::value(new_feature); options - .config + .gctx .shell() .status("Adding", format!("feature `{dep_key}`"))?; } @@ -228,7 +228,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( } } - if options.config.locked() { + if options.gctx.locked() { let new_raw_manifest = manifest.to_string(); if original_raw_manifest != new_raw_manifest { anyhow::bail!( @@ -239,7 +239,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( } if options.dry_run { - options.config.shell().warn("aborting add due to dry run")?; + options.gctx.shell().warn("aborting add due to dry run")?; } else { manifest.write()?; } @@ -288,7 +288,7 @@ fn resolve_dependency( spec: &Package, section: &DepTable, honor_rust_version: bool, - config: &Config, + gctx: &GlobalContext, registry: &mut PackageRegistry<'_>, ) -> CargoResult { let crate_spec = arg @@ -314,16 +314,16 @@ fn resolve_dependency( 
anyhow::bail!("cannot specify a git URL (`{url}`) with a version (`{v}`)."); } let dependency = crate_spec.to_dependency()?.set_source(src); - let selected = select_package(&dependency, config, registry)?; + let selected = select_package(&dependency, gctx, registry)?; if dependency.name != selected.name { - config.shell().warn(format!( + gctx.shell().warn(format!( "translating `{}` to `{}`", dependency.name, selected.name, ))?; } selected } else { - let mut source = crate::sources::GitSource::new(src.source_id()?, config)?; + let mut source = crate::sources::GitSource::new(src.source_id()?, gctx)?; let packages = source.read_packages()?; let package = infer_package_for_git_source(packages, &src)?; Dependency::from(package.summary()) @@ -339,16 +339,16 @@ fn resolve_dependency( anyhow::bail!("cannot specify a path (`{raw_path}`) with a version (`{v}`)."); } let dependency = crate_spec.to_dependency()?.set_source(src); - let selected = select_package(&dependency, config, registry)?; + let selected = select_package(&dependency, gctx, registry)?; if dependency.name != selected.name { - config.shell().warn(format!( + gctx.shell().warn(format!( "translating `{}` to `{}`", dependency.name, selected.name, ))?; } selected } else { - let source = crate::sources::PathSource::new(&path, src.source_id()?, config); + let source = crate::sources::PathSource::new(&path, src.source_id()?, gctx); let package = source .read_packages()? 
.pop() @@ -404,12 +404,12 @@ fn resolve_dependency( &dependency, false, honor_rust_version, - config, + gctx, registry, )?; if dependency.name != latest.name { - config.shell().warn(format!( + gctx.shell().warn(format!( "translating `{}` to `{}`", dependency.name, latest.name, ))?; @@ -434,14 +434,14 @@ fn resolve_dependency( dependency = dependency.clear_version(); } - let query = dependency.query(config)?; + let query = dependency.query(gctx)?; let query = match query { MaybeWorkspace::Workspace(_workspace) => { let dep = find_workspace_dep(dependency.toml_key(), ws.root_manifest())?; if let Some(features) = dep.features.clone() { dependency = dependency.set_inherited_features(features); } - let query = dep.query(config)?; + let query = dep.query(gctx)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("This should have been caught when parsing a workspace root") @@ -578,10 +578,10 @@ fn get_latest_dependency( dependency: &Dependency, _flag_allow_prerelease: bool, honor_rust_version: bool, - config: &Config, + gctx: &GlobalContext, registry: &mut PackageRegistry<'_>, ) -> CargoResult { - let query = dependency.query(config)?; + let query = dependency.query(gctx)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("registry dependencies required, found a workspace dependency"); @@ -614,7 +614,7 @@ fn get_latest_dependency( ) })?; - if config.cli_unstable().msrv_policy && honor_rust_version { + if gctx.cli_unstable().msrv_policy && honor_rust_version { fn parse_msrv(comp: &RustVersion) -> (u64, u64, u64) { (comp.major, comp.minor.unwrap_or(0), comp.patch.unwrap_or(0)) } @@ -650,7 +650,7 @@ fn get_latest_dependency( })?; if latest_msrv.version() < latest.version() { - config.shell().warn(format_args!( + gctx.shell().warn(format_args!( "ignoring `{dependency}@{latest_version}` (which has a rust-version of \ {latest_rust_version}) to satisfy this package's rust-version of \ {rust_version} (use `--ignore-rust-version` to override)", @@ -700,10 
+700,10 @@ fn rust_version_incompat_error( fn select_package( dependency: &Dependency, - config: &Config, + gctx: &GlobalContext, registry: &mut PackageRegistry<'_>, ) -> CargoResult { - let query = dependency.query(config)?; + let query = dependency.query(gctx)?; match query { MaybeWorkspace::Workspace(_) => { unreachable!("path or git dependency expected, found workspace dependency"); diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index 4add5d86326..d35e69c4446 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -5,14 +5,14 @@ use crate::ops; use crate::util::edit_distance; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; -use crate::util::{human_readable_bytes, Config, Progress, ProgressStyle}; +use crate::util::{human_readable_bytes, GlobalContext, Progress, ProgressStyle}; use anyhow::bail; use cargo_util::paths; use std::fs; use std::path::{Path, PathBuf}; -pub struct CleanOptions<'cfg> { - pub config: &'cfg Config, +pub struct CleanOptions<'gctx> { + pub gctx: &'gctx GlobalContext, /// A list of packages to clean. If empty, everything is cleaned. pub spec: Vec, /// The target arch triple to clean, or None for the host arch @@ -27,9 +27,9 @@ pub struct CleanOptions<'cfg> { pub dry_run: bool, } -pub struct CleanContext<'cfg> { - pub config: &'cfg Config, - progress: Box, +pub struct CleanContext<'gctx> { + pub gctx: &'gctx GlobalContext, + progress: Box, pub dry_run: bool, num_files_removed: u64, num_dirs_removed: u64, @@ -39,9 +39,9 @@ pub struct CleanContext<'cfg> { /// Cleans various caches. 
pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { let mut target_dir = ws.target_dir(); - let config = opts.config; - let mut ctx = CleanContext::new(config); - ctx.dry_run = opts.dry_run; + let gctx = opts.gctx; + let mut clean_gctx = CleanContext::new(gctx); + clean_gctx.dry_run = opts.dry_run; if opts.doc { if !opts.spec.is_empty() { @@ -55,7 +55,7 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { } // If the doc option is set, we just want to delete the doc directory. target_dir = target_dir.join("doc"); - ctx.remove_paths(&[target_dir.into_path_unlocked()])?; + clean_gctx.remove_paths(&[target_dir.into_path_unlocked()])?; } else { let profiles = Profiles::new(&ws, opts.requested_profile)?; @@ -73,25 +73,25 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { // Note that we don't bother grabbing a lock here as we're just going to // blow it all away anyway. if opts.spec.is_empty() { - ctx.remove_paths(&[target_dir.into_path_unlocked()])?; + clean_gctx.remove_paths(&[target_dir.into_path_unlocked()])?; } else { - clean_specs(&mut ctx, &ws, &profiles, &opts.targets, &opts.spec)?; + clean_specs(&mut clean_gctx, &ws, &profiles, &opts.targets, &opts.spec)?; } } - ctx.display_summary()?; + clean_gctx.display_summary()?; Ok(()) } fn clean_specs( - ctx: &mut CleanContext<'_>, + clean_ctx: &mut CleanContext<'_>, ws: &Workspace<'_>, profiles: &Profiles, targets: &[String], spec: &[String], ) -> CargoResult<()> { // Clean specific packages. - let requested_kinds = CompileKind::from_requested_targets(ctx.config, targets)?; + let requested_kinds = CompileKind::from_requested_targets(clean_ctx.gctx, targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; let (pkg_set, resolve) = ops::resolve_ws(ws)?; let prof_dir_name = profiles.get_dir_name(); @@ -134,7 +134,7 @@ fn clean_specs( // Translate the spec to a Package. 
let spec = PackageIdSpec::parse(spec_str)?; if spec.partial_version().is_some() { - ctx.config.shell().warn(&format!( + clean_ctx.gctx.shell().warn(&format!( "version qualifier in `-p {}` is ignored, \ cleaning all versions of `{}` found", spec_str, @@ -142,7 +142,7 @@ fn clean_specs( ))?; } if spec.url().is_some() { - ctx.config.shell().warn(&format!( + clean_ctx.gctx.shell().warn(&format!( "url qualifier in `-p {}` ignored, \ cleaning all versions of `{}` found", spec_str, @@ -167,16 +167,17 @@ fn clean_specs( } let packages = pkg_set.get_many(pkg_ids)?; - ctx.progress = Box::new(CleaningPackagesBar::new(ctx.config, packages.len())); + clean_ctx.progress = Box::new(CleaningPackagesBar::new(clean_ctx.gctx, packages.len())); for pkg in packages { let pkg_dir = format!("{}-*", pkg.name()); - ctx.progress.on_cleaning_package(&pkg.name())?; + clean_ctx.progress.on_cleaning_package(&pkg.name())?; // Clean fingerprints. for (_, layout) in &layouts_with_host { let dir = escape_glob_path(layout.fingerprint())?; - ctx.rm_rf_package_glob_containing_hash(&pkg.name(), &Path::new(&dir).join(&pkg_dir))?; + clean_ctx + .rm_rf_package_glob_containing_hash(&pkg.name(), &Path::new(&dir).join(&pkg_dir))?; } for target in pkg.targets() { @@ -184,7 +185,7 @@ fn clean_specs( // Get both the build_script_build and the output directory. for (_, layout) in &layouts_with_host { let dir = escape_glob_path(layout.build())?; - ctx.rm_rf_package_glob_containing_hash( + clean_ctx.rm_rf_package_glob_containing_hash( &pkg.name(), &Path::new(&dir).join(&pkg_dir), )?; @@ -218,35 +219,35 @@ fn clean_specs( let dir_glob = escape_glob_path(dir)?; let dir_glob = Path::new(&dir_glob); - ctx.rm_rf_glob(&dir_glob.join(&hashed_name))?; - ctx.rm_rf(&dir.join(&unhashed_name))?; + clean_ctx.rm_rf_glob(&dir_glob.join(&hashed_name))?; + clean_ctx.rm_rf(&dir.join(&unhashed_name))?; // Remove dep-info file generated by rustc. It is not tracked in // file_types. It does not have a prefix. 
let hashed_dep_info = dir_glob.join(format!("{}-*.d", crate_name)); - ctx.rm_rf_glob(&hashed_dep_info)?; + clean_ctx.rm_rf_glob(&hashed_dep_info)?; let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); - ctx.rm_rf(&unhashed_dep_info)?; + clean_ctx.rm_rf(&unhashed_dep_info)?; // Remove split-debuginfo files generated by rustc. let split_debuginfo_obj = dir_glob.join(format!("{}.*.o", crate_name)); - ctx.rm_rf_glob(&split_debuginfo_obj)?; + clean_ctx.rm_rf_glob(&split_debuginfo_obj)?; let split_debuginfo_dwo = dir_glob.join(format!("{}.*.dwo", crate_name)); - ctx.rm_rf_glob(&split_debuginfo_dwo)?; + clean_ctx.rm_rf_glob(&split_debuginfo_dwo)?; let split_debuginfo_dwp = dir_glob.join(format!("{}.*.dwp", crate_name)); - ctx.rm_rf_glob(&split_debuginfo_dwp)?; + clean_ctx.rm_rf_glob(&split_debuginfo_dwp)?; // Remove the uplifted copy. if let Some(uplift_dir) = uplift_dir { let uplifted_path = uplift_dir.join(file_type.uplift_filename(target)); - ctx.rm_rf(&uplifted_path)?; + clean_ctx.rm_rf(&uplifted_path)?; // Dep-info generated by Cargo itself. let dep_info = uplifted_path.with_extension("d"); - ctx.rm_rf(&dep_info)?; + clean_ctx.rm_rf(&dep_info)?; } } // TODO: what to do about build_script_build? let dir = escape_glob_path(layout.incremental())?; let incremental = Path::new(&dir).join(format!("{}-*", crate_name)); - ctx.rm_rf_glob(&incremental)?; + clean_ctx.rm_rf_glob(&incremental)?; } } } @@ -262,13 +263,13 @@ fn escape_glob_path(pattern: &Path) -> CargoResult { Ok(glob::Pattern::escape(pattern)) } -impl<'cfg> CleanContext<'cfg> { - pub fn new(config: &'cfg Config) -> Self { +impl<'gctx> CleanContext<'gctx> { + pub fn new(gctx: &'gctx GlobalContext) -> Self { // This progress bar will get replaced, this is just here to avoid needing // an Option until the actual bar is created. 
- let progress = CleaningFolderBar::new(config, 0); + let progress = CleaningFolderBar::new(gctx, 0); CleanContext { - config, + gctx, progress: Box::new(progress), dry_run: false, num_files_removed: 0, @@ -326,7 +327,7 @@ impl<'cfg> CleanContext<'cfg> { Ok(meta) => meta, Err(e) => { if e.kind() != std::io::ErrorKind::NotFound { - self.config + self.gctx .shell() .warn(&format!("cannot access {}: {e}", path.display()))?; } @@ -336,7 +337,7 @@ impl<'cfg> CleanContext<'cfg> { // dry-run displays paths while walking, so don't print here. if !self.dry_run { - self.config + self.gctx .shell() .verbose(|shell| shell.status("Removing", path.display()))?; } @@ -368,7 +369,7 @@ impl<'cfg> CleanContext<'cfg> { // like it can be surprising or even frightening if cargo says it // is removing something without actually removing it. And I can't // come up with a different verb to use as the status. - self.config + self.gctx .shell() .verbose(|shell| Ok(writeln!(shell.out(), "{}", entry.path().display())?))?; } @@ -414,11 +415,11 @@ impl<'cfg> CleanContext<'cfg> { (1, _) => format!("1 file"), (2.., _) => format!("{} files", self.num_files_removed), }; - self.config + self.gctx .shell() .status(status, format!("{file_count}{byte_count}"))?; if self.dry_run { - self.config + self.gctx .shell() .warn("no files deleted due to --dry-run")?; } @@ -435,7 +436,7 @@ impl<'cfg> CleanContext<'cfg> { .iter() .map(|path| walkdir::WalkDir::new(path).into_iter().count()) .sum(); - self.progress = Box::new(CleaningFolderBar::new(self.config, num_paths)); + self.progress = Box::new(CleaningFolderBar::new(self.gctx, num_paths)); for path in paths { self.rm_rf(path)?; } @@ -451,16 +452,16 @@ trait CleaningProgressBar { } } -struct CleaningFolderBar<'cfg> { - bar: Progress<'cfg>, +struct CleaningFolderBar<'gctx> { + bar: Progress<'gctx>, max: usize, cur: usize, } -impl<'cfg> CleaningFolderBar<'cfg> { - fn new(cfg: &'cfg Config, max: usize) -> Self { +impl<'gctx> CleaningFolderBar<'gctx> { + fn 
new(gctx: &'gctx GlobalContext, max: usize) -> Self { Self { - bar: Progress::with_style("Cleaning", ProgressStyle::Percentage, cfg), + bar: Progress::with_style("Cleaning", ProgressStyle::Percentage, gctx), max, cur: 0, } @@ -471,7 +472,7 @@ impl<'cfg> CleaningFolderBar<'cfg> { } } -impl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> { +impl<'gctx> CleaningProgressBar for CleaningFolderBar<'gctx> { fn display_now(&mut self) -> CargoResult<()> { self.bar.tick_now(self.cur_progress(), self.max, "") } @@ -482,18 +483,18 @@ impl<'cfg> CleaningProgressBar for CleaningFolderBar<'cfg> { } } -struct CleaningPackagesBar<'cfg> { - bar: Progress<'cfg>, +struct CleaningPackagesBar<'gctx> { + bar: Progress<'gctx>, max: usize, cur: usize, num_files_folders_cleaned: usize, package_being_cleaned: String, } -impl<'cfg> CleaningPackagesBar<'cfg> { - fn new(cfg: &'cfg Config, max: usize) -> Self { +impl<'gctx> CleaningPackagesBar<'gctx> { + fn new(gctx: &'gctx GlobalContext, max: usize) -> Self { Self { - bar: Progress::with_style("Cleaning", ProgressStyle::Ratio, cfg), + bar: Progress::with_style("Cleaning", ProgressStyle::Ratio, gctx), max, cur: 0, num_files_folders_cleaned: 0, @@ -513,7 +514,7 @@ impl<'cfg> CleaningPackagesBar<'cfg> { } } -impl<'cfg> CleaningProgressBar for CleaningPackagesBar<'cfg> { +impl<'gctx> CleaningProgressBar for CleaningPackagesBar<'gctx> { fn display_now(&mut self) -> CargoResult<()> { self.bar .tick_now(self.cur_progress(), self.max, &self.format_message()) diff --git a/src/cargo/ops/cargo_compile/mod.rs b/src/cargo/ops/cargo_compile/mod.rs index d458b940e27..c7a84a087d9 100644 --- a/src/cargo/ops/cargo_compile/mod.rs +++ b/src/cargo/ops/cargo_compile/mod.rs @@ -13,7 +13,7 @@ //! from the resolver. See also [`unit_dependencies`]. //! 5. Construct the [`BuildContext`] with all of the information collected so //! far. This is the end of the "front end" of compilation. -//! 6. Create a [`Context`] which coordinates the compilation process +//! 6. 
Create a [`BuildRunner`] which coordinates the compilation process //! and will perform the following steps: //! 1. Prepare the `target` directory (see [`Layout`]). //! 2. Create a [`JobQueue`]. The queue checks the @@ -42,7 +42,7 @@ use std::sync::Arc; use crate::core::compiler::unit_dependencies::build_unit_dependencies; use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; use crate::core::compiler::{standard_lib, CrateType, TargetInfo}; -use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; +use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation}; use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::Profiles; @@ -52,7 +52,7 @@ use crate::core::{PackageId, PackageSet, SourceId, TargetKind, Workspace}; use crate::drop_println; use crate::ops; use crate::ops::resolve::WorkspaceResolve; -use crate::util::config::Config; +use crate::util::config::GlobalContext; use crate::util::interning::InternedString; use crate::util::{profile, CargoResult, StableHasher}; @@ -101,11 +101,11 @@ pub struct CompileOptions { } impl CompileOptions { - pub fn new(config: &Config, mode: CompileMode) -> CargoResult { + pub fn new(gctx: &GlobalContext, mode: CompileMode) -> CargoResult { let jobs = None; let keep_going = false; Ok(CompileOptions { - build_config: BuildConfig::new(config, jobs, keep_going, &[], mode)?, + build_config: BuildConfig::new(gctx, jobs, keep_going, &[], mode)?, cli_features: CliFeatures::new_all(false), spec: ops::Packages::Packages(Vec::new()), filter: CompileFilter::Default { @@ -150,13 +150,13 @@ pub fn compile_ws<'a>( let interner = UnitInterner::new(); let bcx = create_bcx(ws, options, &interner)?; if options.build_config.unit_graph { - unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; + 
unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.gctx())?; return Compilation::new(&bcx); } - crate::core::gc::auto_gc(bcx.config); + crate::core::gc::auto_gc(bcx.gctx); let _p = profile::start("compiling"); - let cx = Context::new(&bcx)?; - cx.compile(exec) + let build_runner = BuildRunner::new(&bcx)?; + build_runner.compile(exec) } /// Executes `rustc --print `. @@ -172,13 +172,13 @@ pub fn print<'a>( ref target_rustc_args, .. } = *options; - let config = ws.config(); - let rustc = config.load_global_rustc(Some(ws))?; + let gctx = ws.gctx(); + let rustc = gctx.load_global_rustc(Some(ws))?; for (index, kind) in build_config.requested_kinds.iter().enumerate() { if index != 0 { - drop_println!(config); + drop_println!(gctx); } - let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; + let target_info = TargetInfo::new(gctx, &build_config.requested_kinds, &rustc, *kind)?; let mut process = rustc.process(); process.args(&target_info.rustflags); if let Some(args) = target_rustc_args { @@ -197,11 +197,11 @@ pub fn print<'a>( /// /// For how it works and what data it collects, /// please see the [module-level documentation](self). -pub fn create_bcx<'a, 'cfg>( - ws: &'a Workspace<'cfg>, +pub fn create_bcx<'a, 'gctx>( + ws: &'a Workspace<'gctx>, options: &'a CompileOptions, interner: &'a UnitInterner, -) -> CargoResult> { +) -> CargoResult> { let CompileOptions { ref build_config, ref spec, @@ -213,7 +213,7 @@ pub fn create_bcx<'a, 'cfg>( rustdoc_document_private_items, honor_rust_version, } = *options; - let config = ws.config(); + let gctx = ws.gctx(); // Perform some pre-flight validation. match build_config.mode { @@ -222,21 +222,21 @@ pub fn create_bcx<'a, 'cfg>( | CompileMode::Check { .. 
} | CompileMode::Bench | CompileMode::RunCustomBuild => { - if ws.config().get_env("RUST_FLAGS").is_ok() { - config.shell().warn( + if ws.gctx().get_env("RUST_FLAGS").is_ok() { + gctx.shell().warn( "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?", )?; } } CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => { - if ws.config().get_env("RUSTDOC_FLAGS").is_ok() { - config.shell().warn( + if ws.gctx().get_env("RUSTDOC_FLAGS").is_ok() { + gctx.shell().warn( "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?" )?; } } } - config.validate_term_config()?; + gctx.validate_term_config()?; let mut target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; @@ -278,7 +278,7 @@ pub fn create_bcx<'a, 'cfg>( resolved_features, } = resolve; - let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std { + let std_resolve_features = if let Some(crates) = &gctx.cli_unstable().build_std { let (std_package_set, std_resolve, std_features) = standard_lib::resolve_std(ws, &mut target_data, &build_config, crates)?; pkg_set.add_set(std_package_set); @@ -302,7 +302,7 @@ pub fn create_bcx<'a, 'cfg>( to_builds.sort_by_key(|p| p.package_id()); for pkg in to_builds.iter() { - pkg.manifest().print_teapot(config); + pkg.manifest().print_teapot(gctx); if build_config.mode.is_any_test() && !ws.is_member(pkg) @@ -332,7 +332,7 @@ pub fn create_bcx<'a, 'cfg>( let profiles = Profiles::new(ws, build_config.requested_profile)?; profiles.validate_packages( ws.profiles(), - &mut config.shell(), + &mut gctx.shell(), workspace_resolve.as_ref().unwrap_or(&resolve), )?; @@ -375,7 +375,7 @@ pub fn create_bcx<'a, 'cfg>( override_rustc_crate_types(&mut units, args, interner)?; } - let should_scrape = build_config.mode.is_doc() && config.cli_unstable().rustdoc_scrape_examples; + let should_scrape = build_config.mode.is_doc() && gctx.cli_unstable().rustdoc_scrape_examples; let mut 
scrape_units = if should_scrape { UnitGenerator { mode: CompileMode::Docscrape, @@ -386,7 +386,7 @@ pub fn create_bcx<'a, 'cfg>( Vec::new() }; - let std_roots = if let Some(crates) = standard_lib::std_crates(config, Some(&units)) { + let std_roots = if let Some(crates) = standard_lib::std_crates(gctx, Some(&units)) { let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap(); standard_lib::generate_std_roots( &crates, @@ -427,7 +427,7 @@ pub fn create_bcx<'a, 'cfg>( .iter() .any(CompileKind::is_host); let should_share_deps = host_kind_requested - || config.cli_unstable().bindeps + || gctx.cli_unstable().bindeps && unit_graph .iter() .any(|(unit, _)| unit.artifact_target_for_features.is_some()); diff --git a/src/cargo/ops/cargo_compile/packages.rs b/src/cargo/ops/cargo_compile/packages.rs index e0e4bbdabec..5f908ede14e 100644 --- a/src/cargo/ops/cargo_compile/packages.rs +++ b/src/cargo/ops/cargo_compile/packages.rs @@ -63,7 +63,7 @@ impl Packages { .map(Package::package_id) .map(|id| id.to_spec()) .collect(); - let warn = |e| ws.config().shell().warn(e); + let warn = |e| ws.gctx().shell().warn(e); let names = ids .into_iter() .map(|id| id.to_string()) diff --git a/src/cargo/ops/cargo_compile/unit_generator.rs b/src/cargo/ops/cargo_compile/unit_generator.rs index d6b111dd013..18bbab12dc1 100644 --- a/src/cargo/ops/cargo_compile/unit_generator.rs +++ b/src/cargo/ops/cargo_compile/unit_generator.rs @@ -44,8 +44,8 @@ struct Proposal<'a> { /// [`generate_root_units`]: UnitGenerator::generate_root_units /// [`build_unit_dependencies`]: crate::core::compiler::unit_dependencies::build_unit_dependencies /// [`UnitGraph`]: crate::core::compiler::unit_graph::UnitGraph -pub(super) struct UnitGenerator<'a, 'cfg> { - pub ws: &'a Workspace<'cfg>, +pub(super) struct UnitGenerator<'a, 'gctx> { + pub ws: &'a Workspace<'gctx>, pub packages: &'a [&'a Package], pub filter: &'a CompileFilter, pub requested_kinds: &'a [CompileKind], @@ -54,7 +54,7 @@ pub(super) struct 
UnitGenerator<'a, 'cfg> { pub resolve: &'a Resolve, pub workspace_resolve: &'a Option, pub resolved_features: &'a features::ResolvedFeatures, - pub package_set: &'a PackageSet<'cfg>, + pub package_set: &'a PackageSet<'gctx>, pub profiles: &'a Profiles, pub interner: &'a UnitInterner, pub has_dev_units: HasDevUnits, @@ -148,7 +148,7 @@ impl<'a> UnitGenerator<'a, '_> { // // Forcing the lib to be compiled three times during `cargo // test` is probably also not desirable. - UnitFor::new_test(self.ws.config(), kind) + UnitFor::new_test(self.ws.gctx(), kind) } else if target.for_host() { // Proc macro / plugin should not have `panic` set. UnitFor::new_compiler(kind) @@ -361,7 +361,7 @@ impl<'a> UnitGenerator<'a, '_> { if self.mode.is_doc_test() && !target.doctestable() { let types = target.rustc_crate_types(); let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); - self.ws.config().shell().warn(format!( + self.ws.gctx().shell().warn(format!( "doc tests are not supported for crate type(s) `{}` in package `{}`", types_str.join(", "), pkg.name() @@ -487,7 +487,7 @@ impl<'a> UnitGenerator<'a, '_> { let skipped_examples = skipped_examples.into_inner(); if !skipped_examples.is_empty() { - let mut shell = self.ws.config().shell(); + let mut shell = self.ws.gctx().shell(); let example_str = skipped_examples.join(", "); shell.warn(format!( "\ @@ -505,7 +505,7 @@ Rustdoc did not scrape the following examples because they require dev-dependenc /// We want to emit a warning to make sure the user knows that this run is a no-op, /// and their code remains unchecked despite cargo not returning any errors fn unmatched_target_filters(&self, units: &[Unit]) -> CargoResult<()> { - let mut shell = self.ws.config().shell(); + let mut shell = self.ws.gctx().shell(); if let CompileFilter::Only { all_targets, lib: _, @@ -562,7 +562,7 @@ Rustdoc did not scrape the following examples because they require dev-dependenc Some(resolve) => resolve, }; - let mut shell = 
self.ws.config().shell(); + let mut shell = self.ws.gctx().shell(); for feature in required_features { let fv = FeatureValue::new(feature.into()); match &fv { diff --git a/src/cargo/ops/cargo_config.rs b/src/cargo/ops/cargo_config.rs index 2277bd6f836..fee575b5b50 100644 --- a/src/cargo/ops/cargo_config.rs +++ b/src/cargo/ops/cargo_config.rs @@ -1,6 +1,6 @@ //! Implementation of `cargo config` subcommand. -use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition}; +use crate::util::config::{ConfigKey, ConfigValue as CV, Definition, GlobalContext}; use crate::util::errors::CargoResult; use crate::{drop_eprintln, drop_println}; use anyhow::{bail, format_err, Error}; @@ -50,7 +50,7 @@ pub struct GetOptions<'a> { pub merged: bool, } -pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { +pub fn get(gctx: &GlobalContext, opts: &GetOptions<'_>) -> CargoResult<()> { if opts.show_origin && !matches!(opts.format, ConfigFormat::Toml) { bail!( "the `{}` format does not support --show-origin, try the `toml` format instead", @@ -62,23 +62,23 @@ pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { None => ConfigKey::new(), }; if opts.merged { - let cv = config + let cv = gctx .get_cv_with_env(&key)? 
.ok_or_else(|| format_err!("config value `{}` is not set", key))?; match opts.format { - ConfigFormat::Toml => print_toml(config, opts, &key, &cv), - ConfigFormat::Json => print_json(config, &key, &cv, true), - ConfigFormat::JsonValue => print_json(config, &key, &cv, false), + ConfigFormat::Toml => print_toml(gctx, opts, &key, &cv), + ConfigFormat::Json => print_json(gctx, &key, &cv, true), + ConfigFormat::JsonValue => print_json(gctx, &key, &cv, false), } - if let Some(env) = maybe_env(config, &key, &cv) { + if let Some(env) = maybe_env(gctx, &key, &cv) { match opts.format { - ConfigFormat::Toml => print_toml_env(config, &env), - ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env), + ConfigFormat::Toml => print_toml_env(gctx, &env), + ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(gctx, &env), } } } else { match &opts.format { - ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?, + ConfigFormat::Toml => print_toml_unmerged(gctx, opts, &key)?, format => bail!( "the `{}` format does not support --merged=no, try the `toml` format instead", format @@ -89,18 +89,18 @@ pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { } /// Checks for environment variables that might be used. -fn maybe_env<'config>( - config: &'config Config, +fn maybe_env<'gctx>( + gctx: &'gctx GlobalContext, key: &ConfigKey, cv: &CV, -) -> Option> { +) -> Option> { // Only fetching a table is unable to load env values. Leaf entries should // work properly. 
match cv { CV::Table(_map, _def) => {} _ => return None, } - let mut env: Vec<_> = config + let mut env: Vec<_> = gctx .env() .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key()))) .collect(); @@ -112,7 +112,7 @@ fn maybe_env<'config>( } } -fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { +fn print_toml(gctx: &GlobalContext, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { let origin = |def: &Definition| -> String { if !opts.show_origin { return "".to_string(); @@ -120,10 +120,10 @@ fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) format!(" # {}", def) }; match cv { - CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), - CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), + CV::Boolean(val, def) => drop_println!(gctx, "{} = {}{}", key, val, origin(def)), + CV::Integer(val, def) => drop_println!(gctx, "{} = {}{}", key, val, origin(def)), CV::String(val, def) => drop_println!( - config, + gctx, "{} = {}{}", key, toml_edit::Value::from(val), @@ -131,20 +131,20 @@ fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) ), CV::List(vals, _def) => { if opts.show_origin { - drop_println!(config, "{} = [", key); + drop_println!(gctx, "{} = [", key); for (val, def) in vals { drop_println!( - config, + gctx, " {}, # {}", serde::Serialize::serialize(val, toml_edit::ser::ValueSerializer::new()) .unwrap(), def ); } - drop_println!(config, "]"); + drop_println!(gctx, "]"); } else { let vals: toml_edit::Array = vals.iter().map(|x| &x.0).collect(); - drop_println!(config, "{} = {}", key, vals); + drop_println!(gctx, "{} = {}", key, vals); } } CV::Table(table, _def) => { @@ -155,35 +155,35 @@ fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) // push or push_sensitive shouldn't matter here, since this is // not dealing with environment variables. 
subkey.push(table_key); - print_toml(config, opts, &subkey, val); + print_toml(gctx, opts, &subkey, val); } } } } -fn print_toml_env(config: &Config, env: &[(&str, &str)]) { +fn print_toml_env(gctx: &GlobalContext, env: &[(&str, &str)]) { drop_println!( - config, + gctx, "# The following environment variables may affect the loaded values." ); for (env_key, env_value) in env { let val = shell_escape::escape(Cow::Borrowed(env_value)); - drop_println!(config, "# {}={}", env_key, val); + drop_println!(gctx, "# {}={}", env_key, val); } } -fn print_json_env(config: &Config, env: &[(&str, &str)]) { +fn print_json_env(gctx: &GlobalContext, env: &[(&str, &str)]) { drop_eprintln!( - config, + gctx, "note: The following environment variables may affect the loaded values." ); for (env_key, env_value) in env { let val = shell_escape::escape(Cow::Borrowed(env_value)); - drop_eprintln!(config, "{}={}", env_key, val); + drop_eprintln!(gctx, "{}={}", env_key, val); } } -fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { +fn print_json(gctx: &GlobalContext, key: &ConfigKey, cv: &CV, include_key: bool) { let json_value = if key.is_root() || !include_key { cv_to_json(cv) } else { @@ -199,7 +199,7 @@ fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { table[last_part] = cv_to_json(cv); root_table }; - drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap()); + drop_println!(gctx, "{}", serde_json::to_string(&json_value).unwrap()); // Helper for recursively converting a CV to JSON. 
fn cv_to_json(cv: &CV) -> serde_json::Value { @@ -222,11 +222,15 @@ fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { } } -fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> { +fn print_toml_unmerged( + gctx: &GlobalContext, + opts: &GetOptions<'_>, + key: &ConfigKey, +) -> CargoResult<()> { let print_table = |cv: &CV| { - drop_println!(config, "# {}", cv.definition()); - print_toml(config, opts, &ConfigKey::new(), cv); - drop_println!(config, ""); + drop_println!(gctx, "# {}", cv.definition()); + print_toml(gctx, opts, &ConfigKey::new(), cv); + drop_println!(gctx, ""); }; // This removes entries from the given CV so that all that remains is the // given key. Returns false if no entries were found. @@ -261,7 +265,7 @@ fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) }) } - let mut cli_args = config.cli_args_as_table()?; + let mut cli_args = gctx.cli_args_as_table()?; if trim_cv(&mut cli_args, key)? { print_table(&cli_args); } @@ -284,23 +288,23 @@ fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) // TODO: It might be a good idea to teach the Config loader to support // environment variable aliases so that these special cases are less // special, and will just naturally get loaded as part of the config. - let mut env: Vec<_> = config + let mut env: Vec<_> = gctx .env() .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key())) .collect(); if !env.is_empty() { env.sort_by_key(|x| x.0); - drop_println!(config, "# Environment variables"); + drop_println!(gctx, "# Environment variables"); for (key, value) in env { // Displaying this in "shell" syntax instead of TOML, since that // somehow makes more sense to me. 
let val = shell_escape::escape(Cow::Borrowed(value)); - drop_println!(config, "# {}={}", key, val); + drop_println!(gctx, "# {}={}", key, val); } - drop_println!(config, ""); + drop_println!(gctx, ""); } - let unmerged = config.load_values_unmerged()?; + let unmerged = gctx.load_values_unmerged()?; for mut cv in unmerged { if trim_cv(&mut cv, key)? { print_table(&cv); diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 518819b2d00..542f4926959 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -1,7 +1,7 @@ use crate::core::compiler::{Compilation, CompileKind}; use crate::core::{Shell, Workspace}; use crate::ops; -use crate::util::config::{Config, PathAndArgs}; +use crate::util::config::{GlobalContext, PathAndArgs}; use crate::util::CargoResult; use anyhow::{bail, Error}; use std::path::Path; @@ -66,16 +66,16 @@ pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { if path.exists() { let config_browser = { - let cfg: Option = ws.config().get("doc.browser")?; - cfg.map(|path_args| (path_args.path.resolve_program(ws.config()), path_args.args)) + let cfg: Option = ws.gctx().get("doc.browser")?; + cfg.map(|path_args| (path_args.path.resolve_program(ws.gctx()), path_args.args)) }; - let mut shell = ws.config().shell(); + let mut shell = ws.gctx().shell(); let link = shell.err_file_hyperlink(&path); shell.status( "Opening", format!("{}{}{}", link.open(), path.display(), link.close()), )?; - open_docs(&path, &mut shell, config_browser, ws.config())?; + open_docs(&path, &mut shell, config_browser, ws.gctx())?; } } else { for name in &compilation.root_crate_names { @@ -83,7 +83,7 @@ pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { let path = path_by_output_format(&compilation, &kind, &name, &options.output_format); if path.exists() { - let mut shell = ws.config().shell(); + let mut shell = ws.gctx().shell(); let link = shell.err_file_hyperlink(&path); shell.status( "Generated", @@ 
-119,10 +119,10 @@ fn open_docs( path: &Path, shell: &mut Shell, config_browser: Option<(PathBuf, Vec)>, - config: &Config, + gctx: &GlobalContext, ) -> CargoResult<()> { let browser = - config_browser.or_else(|| Some((PathBuf::from(config.get_env_os("BROWSER")?), Vec::new()))); + config_browser.or_else(|| Some((PathBuf::from(gctx.get_env_os("BROWSER")?), Vec::new()))); match browser { Some((browser, initial_args)) => { diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index ac2b60aabc0..56ae6db670e 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -4,11 +4,11 @@ use crate::core::{PackageSet, Resolve, Workspace}; use crate::ops; use crate::util::config::JobsConfig; use crate::util::CargoResult; -use crate::util::Config; +use crate::util::GlobalContext; use std::collections::HashSet; pub struct FetchOptions<'a> { - pub config: &'a Config, + pub gctx: &'a GlobalContext, /// The target arch triple to fetch dependencies for pub targets: Vec, } @@ -23,14 +23,9 @@ pub fn fetch<'a>( let jobs = Some(JobsConfig::Integer(1)); let keep_going = false; - let config = ws.config(); - let build_config = BuildConfig::new( - config, - jobs, - keep_going, - &options.targets, - CompileMode::Build, - )?; + let gctx = ws.gctx(); + let build_config = + BuildConfig::new(gctx, jobs, keep_going, &options.targets, CompileMode::Build)?; let mut data = RustcTargetData::new(ws, &build_config.requested_kinds)?; let mut fetched_packages = HashSet::new(); let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::>(); @@ -69,14 +64,14 @@ pub fn fetch<'a>( // If -Zbuild-std was passed, download dependencies for the standard library. // We don't know ahead of time what jobs we'll be running, so tell `std_crates` that. 
- if let Some(crates) = standard_lib::std_crates(config, None) { + if let Some(crates) = standard_lib::std_crates(gctx, None) { let (std_package_set, _, _) = standard_lib::resolve_std(ws, &mut data, &build_config, &crates)?; packages.add_set(std_package_set); } packages.get_many(to_download)?; - crate::core::gc::auto_gc(config); + crate::core::gc::auto_gc(gctx); Ok((resolve, packages)) } diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 30825af7e7b..c27754dbc81 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -7,7 +7,7 @@ use crate::core::{Resolve, SourceId, Workspace}; use crate::ops; use crate::sources::source::QueryKind; use crate::util::cache_lock::CacheLockMode; -use crate::util::config::Config; +use crate::util::config::GlobalContext; use crate::util::style; use crate::util::CargoResult; use anstyle::Style; @@ -16,7 +16,7 @@ use std::collections::{BTreeMap, HashSet}; use tracing::debug; pub struct UpdateOptions<'a> { - pub config: &'a Config, + pub gctx: &'a GlobalContext, pub to_update: Vec, pub precise: Option<&'a str>, pub recursive: bool, @@ -25,7 +25,7 @@ pub struct UpdateOptions<'a> { } pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { - let mut registry = PackageRegistry::new(ws.config())?; + let mut registry = PackageRegistry::new(ws.gctx())?; let mut resolve = ops::resolve_with_previous( &mut registry, ws, @@ -52,7 +52,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes // Updates often require a lot of modifications to the registry, so ensure // that we're synchronized against other Cargos. let _lock = ws - .config() + .gctx() .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; let previous_resolve = match ops::load_pkg_lockfile(ws)? 
{ @@ -64,7 +64,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes // Precise option specified, so calculate a previous_resolve required // by precise package update later. Some(_) => { - let mut registry = PackageRegistry::new(opts.config)?; + let mut registry = PackageRegistry::new(opts.gctx)?; ops::resolve_with_previous( &mut registry, ws, @@ -79,7 +79,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes } } }; - let mut registry = PackageRegistry::new(opts.config)?; + let mut registry = PackageRegistry::new(opts.gctx)?; let mut to_avoid = HashSet::new(); if opts.to_update.is_empty() { @@ -156,7 +156,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes // Summarize what is changing for the user. let print_change = |status: &str, msg: String, color: &Style| { - opts.config.shell().status_with_color(status, msg, color) + opts.gctx.shell().status_with_color(status, msg, color) }; let mut unchanged_behind = 0; for ResolvedPackageVersions { @@ -268,8 +268,8 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes if let Some(latest) = latest { unchanged_behind += 1; - if opts.config.shell().verbosity() == Verbosity::Verbose { - opts.config.shell().status_with_color( + if opts.gctx.shell().verbosity() == Verbosity::Verbose { + opts.gctx.shell().status_with_color( "Unchanged", format!("{package}{latest}"), &anstyle::Style::new().bold(), @@ -278,19 +278,19 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes } } } - if opts.config.shell().verbosity() == Verbosity::Verbose { - opts.config.shell().note( + if opts.gctx.shell().verbosity() == Verbosity::Verbose { + opts.gctx.shell().note( "to see how you depend on a package, run `cargo tree --invert --package @`", )?; } else { if 0 < unchanged_behind { - opts.config.shell().note(format!( + opts.gctx.shell().note(format!( "pass `--verbose` to see {unchanged_behind} 
unchanged dependencies behind latest" ))?; } } if opts.dry_run { - opts.config + opts.gctx .shell() .warn("not updating lockfile due to dry run")?; } else { diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index 894b0441ad1..ced0ea932b1 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -10,7 +10,7 @@ use crate::ops::{CompileFilter, Packages}; use crate::sources::source::Source; use crate::sources::{GitSource, PathSource, SourceConfigMap}; use crate::util::errors::CargoResult; -use crate::util::{Config, Filesystem, Rustc}; +use crate::util::{Filesystem, GlobalContext, Rustc}; use crate::{drop_println, ops}; use anyhow::{bail, Context as _}; @@ -37,8 +37,8 @@ impl Drop for Transaction { } } -struct InstallablePackage<'cfg> { - config: &'cfg Config, +struct InstallablePackage<'gctx> { + gctx: &'gctx GlobalContext, opts: ops::CompileOptions, root: Filesystem, source_id: SourceId, @@ -47,15 +47,15 @@ struct InstallablePackage<'cfg> { no_track: bool, pkg: Package, - ws: Workspace<'cfg>, + ws: Workspace<'gctx>, rustc: Rustc, target: String, } -impl<'cfg> InstallablePackage<'cfg> { +impl<'gctx> InstallablePackage<'gctx> { // Returns pkg to install. None if pkg is already installed pub fn new( - config: &'cfg Config, + gctx: &'gctx GlobalContext, root: Filesystem, map: SourceConfigMap<'_>, krate: Option<&str>, @@ -98,16 +98,16 @@ impl<'cfg> InstallablePackage<'cfg> { }; if source_id.is_git() { - let mut source = GitSource::new(source_id, config)?; + let mut source = GitSource::new(source_id, gctx)?; select_pkg( &mut source, dep, |git: &mut GitSource<'_>| git.read_packages(), - config, + gctx, current_rust_version, )? } else if source_id.is_path() { - let mut src = path_source(source_id, config)?; + let mut src = path_source(source_id, gctx)?; if !src.path().is_dir() { bail!( "`{}` is not a directory. 
\ @@ -141,7 +141,7 @@ impl<'cfg> InstallablePackage<'cfg> { &mut src, dep, |path: &mut PathSource<'_>| path.read_packages(), - config, + gctx, current_rust_version, )? } else if let Some(dep) = dep { @@ -149,7 +149,7 @@ impl<'cfg> InstallablePackage<'cfg> { if let Ok(Some(pkg)) = installed_exact_package( dep.clone(), &mut source, - config, + gctx, original_opts, &root, &dst, @@ -159,13 +159,13 @@ impl<'cfg> InstallablePackage<'cfg> { "package `{}` is already installed, use --force to override", pkg ); - config.shell().status("Ignored", &msg)?; + gctx.shell().status("Ignored", &msg)?; return Ok(None); } select_dep_pkg( &mut source, dep, - config, + gctx, needs_update_if_source_is_index, current_rust_version, )? @@ -179,11 +179,11 @@ impl<'cfg> InstallablePackage<'cfg> { }; let (ws, rustc, target) = - make_ws_rustc_target(config, &original_opts, &source_id, pkg.clone())?; + make_ws_rustc_target(gctx, &original_opts, &source_id, pkg.clone())?; // If we're installing in --locked mode and there's no `Cargo.lock` published // ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026 - if config.locked() && !ws.root().join("Cargo.lock").exists() { - config.shell().warn(format!( + if gctx.locked() && !ws.root().join("Cargo.lock").exists() { + gctx.shell().warn(format!( "no Cargo.lock file published in {}", pkg.to_string() ))?; @@ -209,7 +209,7 @@ impl<'cfg> InstallablePackage<'cfg> { if from_cwd { if pkg.manifest().edition() == Edition::Edition2015 { - config.shell().warn( + gctx.shell().warn( "Using `cargo install` to install the binaries from the \ package in current working directory is deprecated, \ use `cargo install --path .` instead. \ @@ -238,7 +238,7 @@ impl<'cfg> InstallablePackage<'cfg> { } let ip = InstallablePackage { - config, + gctx, opts, root, source_id, @@ -258,13 +258,13 @@ impl<'cfg> InstallablePackage<'cfg> { // Check for conflicts. 
ip.no_track_duplicates(&dst)?; } else if is_installed( - &ip.pkg, config, &ip.opts, &ip.rustc, &ip.target, &ip.root, &dst, force, + &ip.pkg, gctx, &ip.opts, &ip.rustc, &ip.target, &ip.root, &dst, force, )? { let msg = format!( "package `{}` is already installed, use --force to override", ip.pkg ); - config.shell().status("Ignored", &msg)?; + gctx.shell().status("Ignored", &msg)?; return Ok(None); } @@ -297,14 +297,14 @@ impl<'cfg> InstallablePackage<'cfg> { } fn install_one(mut self) -> CargoResult { - self.config.shell().status("Installing", &self.pkg)?; + self.gctx.shell().status("Installing", &self.pkg)?; let dst = self.root.join("bin").into_path_unlocked(); let mut td_opt = None; let mut needs_cleanup = false; if !self.source_id.is_path() { - let target_dir = if let Some(dir) = self.config.target_dir()? { + let target_dir = if let Some(dir) = self.gctx.target_dir()? { dir } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() { let p = td.path().to_owned(); @@ -312,7 +312,7 @@ impl<'cfg> InstallablePackage<'cfg> { Filesystem::new(p) } else { needs_cleanup = true; - Filesystem::new(self.config.cwd().join("target-install")) + Filesystem::new(self.gctx.cwd().join("target-install")) }; self.ws.set_target_dir(target_dir); } @@ -385,7 +385,7 @@ impl<'cfg> InstallablePackage<'cfg> { .filter(|t| t.is_executable()) .collect(); if !binaries.is_empty() { - self.config + self.gctx .shell() .warn(make_warning_about_missing_features(&binaries))?; } @@ -398,7 +398,7 @@ impl<'cfg> InstallablePackage<'cfg> { let (tracker, duplicates) = if self.no_track { (None, self.no_track_duplicates(&dst)?) 
} else { - let tracker = InstallTracker::load(self.config, &self.root)?; + let tracker = InstallTracker::load(self.gctx, &self.root)?; let (_freshness, duplicates) = tracker.check_upgrade( &dst, &self.pkg, @@ -439,7 +439,7 @@ impl<'cfg> InstallablePackage<'cfg> { for bin in to_install.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); - self.config.shell().status("Installing", dst.display())?; + self.gctx.shell().status("Installing", dst.display())?; fs::rename(&src, &dst).with_context(|| { format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; @@ -454,7 +454,7 @@ impl<'cfg> InstallablePackage<'cfg> { for &bin in to_replace.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); - self.config.shell().status("Replacing", dst.display())?; + self.gctx.shell().status("Replacing", dst.display())?; fs::rename(&src, &dst).with_context(|| { format!("failed to move `{}` to `{}`", src.display(), dst.display()) })?; @@ -479,7 +479,7 @@ impl<'cfg> InstallablePackage<'cfg> { remove_orphaned_bins(&self.ws, &mut tracker, &duplicates, &self.pkg, &dst) { // Don't hard error on remove. 
- self.config + self.gctx .shell() .warn(format!("failed to remove orphan: {:?}", e))?; } @@ -515,7 +515,7 @@ impl<'cfg> InstallablePackage<'cfg> { } if duplicates.is_empty() { - self.config.shell().status( + self.gctx.shell().status( "Installed", format!( "package `{}` {}", @@ -526,7 +526,7 @@ impl<'cfg> InstallablePackage<'cfg> { Ok(true) } else { if !to_install.is_empty() { - self.config.shell().status( + self.gctx.shell().status( "Installed", format!("package `{}` {}", self.pkg, executables(to_install.iter())), )?; @@ -539,7 +539,7 @@ impl<'cfg> InstallablePackage<'cfg> { pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name); } for (pkg_descr, bin_names) in &pkg_map { - self.config.shell().status( + self.gctx.shell().status( "Replaced", format!( "package `{}` with `{}` {}", @@ -562,7 +562,7 @@ impl<'cfg> InstallablePackage<'cfg> { // wouldn't be available for `compile_ws`. let (pkg_set, resolve) = ops::resolve_ws(&self.ws)?; ops::check_yanked( - self.ws.config(), + self.ws.gctx(), &pkg_set, &resolve, "consider running without --locked", @@ -610,7 +610,7 @@ Consider enabling some of the needed features by passing, e.g., `--features=\"{e } pub fn install( - config: &Config, + gctx: &GlobalContext, root: Option<&str>, krates: Vec<(String, Option)>, source_id: SourceId, @@ -619,12 +619,12 @@ pub fn install( force: bool, no_track: bool, ) -> CargoResult<()> { - let root = resolve_root(root, config)?; + let root = resolve_root(root, gctx)?; let dst = root.join("bin").into_path_unlocked(); - let map = SourceConfigMap::new(config)?; + let map = SourceConfigMap::new(gctx)?; let current_rust_version = if opts.honor_rust_version { - let rustc = config.load_global_rustc(None)?; + let rustc = gctx.load_global_rustc(None)?; // Remove any pre-release identifiers for easier comparison let current_version = &rustc.version; @@ -645,7 +645,7 @@ pub fn install( .map(|(k, v)| (Some(k.as_str()), v.as_ref())) .unwrap_or((None, None)); let installable_pkg = 
InstallablePackage::new( - config, + gctx, root, map, krate, @@ -676,7 +676,7 @@ pub fn install( let root = root.clone(); let map = map.clone(); match InstallablePackage::new( - config, + gctx, root, map, Some(krate.as_str()), @@ -699,7 +699,7 @@ pub fn install( None } Err(e) => { - crate::display_error(&e, &mut config.shell()); + crate::display_error(&e, &mut gctx.shell()); failed.push(krate.as_str()); // We assume an update was performed if we got an error. did_update = true; @@ -722,7 +722,7 @@ pub fn install( } } Err(e) => { - crate::display_error(&e, &mut config.shell()); + crate::display_error(&e, &mut gctx.shell()); failed.push(krate); } } @@ -739,7 +739,7 @@ pub fn install( )); } if !succeeded.is_empty() || !failed.is_empty() { - config.shell().status("Summary", summary.join(" "))?; + gctx.shell().status("Summary", summary.join(" "))?; } (!succeeded.is_empty(), !failed.is_empty()) @@ -748,11 +748,11 @@ pub fn install( if installed_anything { // Print a warning that if this directory isn't in PATH that they won't be // able to run these commands. 
- let path = config.get_env_os("PATH").unwrap_or_default(); + let path = gctx.get_env_os("PATH").unwrap_or_default(); let dst_in_path = env::split_paths(&path).any(|path| path == dst); if !dst_in_path { - config.shell().warn(&format!( + gctx.shell().warn(&format!( "be sure to add `{}` to your PATH to be \ able to run the installed binaries", dst.display() @@ -769,7 +769,7 @@ pub fn install( fn is_installed( pkg: &Package, - config: &Config, + gctx: &GlobalContext, opts: &ops::CompileOptions, rustc: &Rustc, target: &str, @@ -777,7 +777,7 @@ fn is_installed( dst: &Path, force: bool, ) -> CargoResult { - let tracker = InstallTracker::load(config, root)?; + let tracker = InstallTracker::load(gctx, root)?; let (freshness, _duplicates) = tracker.check_upgrade(dst, pkg, force, opts, target, &rustc.verbose_version)?; Ok(freshness.is_fresh()) @@ -789,7 +789,7 @@ fn is_installed( fn installed_exact_package( dep: Dependency, source: &mut T, - config: &Config, + gctx: &GlobalContext, opts: &ops::CompileOptions, root: &Filesystem, dst: &Path, @@ -807,31 +807,31 @@ where // expensive network call in the case that the package is already installed. // If this fails, the caller will possibly do an index update and try again, this is just a // best-effort check to see if we can avoid hitting the network. 
- if let Ok(pkg) = select_dep_pkg(source, dep, config, false, None) { + if let Ok(pkg) = select_dep_pkg(source, dep, gctx, false, None) { let (_ws, rustc, target) = - make_ws_rustc_target(config, opts, &source.source_id(), pkg.clone())?; - if let Ok(true) = is_installed(&pkg, config, opts, &rustc, &target, root, dst, force) { + make_ws_rustc_target(gctx, opts, &source.source_id(), pkg.clone())?; + if let Ok(true) = is_installed(&pkg, gctx, opts, &rustc, &target, root, dst, force) { return Ok(Some(pkg)); } } Ok(None) } -fn make_ws_rustc_target<'cfg>( - config: &'cfg Config, +fn make_ws_rustc_target<'gctx>( + gctx: &'gctx GlobalContext, opts: &ops::CompileOptions, source_id: &SourceId, pkg: Package, -) -> CargoResult<(Workspace<'cfg>, Rustc, String)> { +) -> CargoResult<(Workspace<'gctx>, Rustc, String)> { let mut ws = if source_id.is_git() || source_id.is_path() { - Workspace::new(pkg.manifest_path(), config)? + Workspace::new(pkg.manifest_path(), gctx)? } else { - Workspace::ephemeral(pkg, config, None, false)? + Workspace::ephemeral(pkg, gctx, None, false)? }; - ws.set_ignore_lock(config.lock_update_allowed()); + ws.set_ignore_lock(gctx.lock_update_allowed()); ws.set_require_optional_deps(false); - let rustc = config.load_global_rustc(Some(&ws))?; + let rustc = gctx.load_global_rustc(Some(&ws))?; let target = match &opts.build_config.single_requested_kind()? { CompileKind::Host => rustc.host.as_str().to_owned(), CompileKind::Target(target) => target.short_name().to_owned(), @@ -841,13 +841,13 @@ fn make_ws_rustc_target<'cfg>( } /// Display a list of installed binaries. 
-pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { - let root = resolve_root(dst, config)?; - let tracker = InstallTracker::load(config, &root)?; +pub fn install_list(dst: Option<&str>, gctx: &GlobalContext) -> CargoResult<()> { + let root = resolve_root(dst, gctx)?; + let tracker = InstallTracker::load(gctx, &root)?; for (k, v) in tracker.all_installed_bins() { - drop_println!(config, "{}:", k); + drop_println!(gctx, "{}:", k); for bin in v { - drop_println!(config, " {}", bin); + drop_println!(gctx, " {}", bin); } } Ok(()) @@ -890,7 +890,7 @@ fn remove_orphaned_bins( for bin in bins { let full_path = dst.join(bin); if full_path.exists() { - ws.config().shell().status( + ws.gctx().shell().status( "Removing", format!( "executable `{}` from previous version {}", diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index 4c8023ae039..b0bcf6daca4 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -3,8 +3,8 @@ use crate::util::errors::CargoResult; use crate::util::important_paths::find_root_manifest_for_wd; use crate::util::toml_mut::is_sorted; use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; -use crate::util::{restricted_names, Config}; -use anyhow::{anyhow, Context}; +use crate::util::{restricted_names, GlobalContext}; +use anyhow::{anyhow, Context as _}; use cargo_util::paths::{self, write_atomic}; use cargo_util_schemas::manifest::PackageName; use serde::de; @@ -437,11 +437,10 @@ fn calculate_new_project_kind( requested_kind } -pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { +pub fn new(opts: &NewOptions, gctx: &GlobalContext) -> CargoResult<()> { let path = &opts.path; let name = get_name(path, opts)?; - config - .shell() + gctx.shell() .status("Creating", format!("{} `{}` package", opts.kind, name))?; if path.exists() { @@ -451,11 +450,11 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { path.display() ) } - check_path(path, 
&mut config.shell())?; + check_path(path, &mut gctx.shell())?; let is_bin = opts.kind.is_bin(); - check_name(name, opts.name.is_none(), is_bin, &mut config.shell())?; + check_name(name, opts.name.is_none(), is_bin, &mut gctx.shell())?; let mkopts = MkOptions { version_control: opts.version_control, @@ -466,7 +465,7 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { registry: opts.registry.as_deref(), }; - mk(config, &mkopts).with_context(|| { + mk(gctx, &mkopts).with_context(|| { format!( "Failed to create package `{}` at `{}`", name, @@ -476,9 +475,9 @@ pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { Ok(()) } -pub fn init(opts: &NewOptions, config: &Config) -> CargoResult { +pub fn init(opts: &NewOptions, gctx: &GlobalContext) -> CargoResult { // This is here just as a random location to exercise the internal error handling. - if config.get_env_os("__CARGO_TEST_INTERNAL_ERROR").is_some() { + if gctx.get_env_os("__CARGO_TEST_INTERNAL_ERROR").is_some() { return Err(crate::util::internal("internal error test")); } @@ -487,14 +486,13 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult { let mut src_paths_types = vec![]; detect_source_paths_and_types(path, name, &mut src_paths_types)?; let kind = calculate_new_project_kind(opts.kind, opts.auto_detect_kind, &src_paths_types); - config - .shell() + gctx.shell() .status("Creating", format!("{} package", opts.kind))?; if path.join("Cargo.toml").exists() { anyhow::bail!("`cargo init` cannot be run on existing Cargo packages") } - check_path(path, &mut config.shell())?; + check_path(path, &mut gctx.shell())?; let has_bin = kind.is_bin(); @@ -507,7 +505,7 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult { } else { NewProjectKind::Lib }; - config.shell().warn(format!( + gctx.shell().warn(format!( "file `{}` seems to be a {} file", src_paths_types[0].relative_path, file_type ))?; @@ -523,7 +521,7 @@ pub fn init(opts: &NewOptions, config: &Config) -> 
CargoResult { ) } - check_name(name, opts.name.is_none(), has_bin, &mut config.shell())?; + check_name(name, opts.name.is_none(), has_bin, &mut gctx.shell())?; let mut version_control = opts.version_control; @@ -570,7 +568,7 @@ pub fn init(opts: &NewOptions, config: &Config) -> CargoResult { registry: opts.registry.as_deref(), }; - mk(config, &mkopts).with_context(|| { + mk(gctx, &mkopts).with_context(|| { format!( "Failed to create package `{}` at `{}`", name, @@ -713,7 +711,7 @@ fn write_ignore_file(base_path: &Path, list: &IgnoreList, vcs: VersionControl) - } /// Initializes the correct VCS system based on the provided config. -fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<()> { +fn init_vcs(path: &Path, vcs: VersionControl, gctx: &GlobalContext) -> CargoResult<()> { match vcs { VersionControl::Git => { if !path.join(".git").exists() { @@ -721,22 +719,22 @@ fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<() // directory in the root of a posix filesystem. 
// See: https://github.com/libgit2/libgit2/issues/5130 paths::create_dir_all(path)?; - GitRepo::init(path, config.cwd())?; + GitRepo::init(path, gctx.cwd())?; } } VersionControl::Hg => { if !path.join(".hg").exists() { - HgRepo::init(path, config.cwd())?; + HgRepo::init(path, gctx.cwd())?; } } VersionControl::Pijul => { if !path.join(".pijul").exists() { - PijulRepo::init(path, config.cwd())?; + PijulRepo::init(path, gctx.cwd())?; } } VersionControl::Fossil => { if !path.join(".fossil").exists() { - FossilRepo::init(path, config.cwd())?; + FossilRepo::init(path, gctx.cwd())?; } } VersionControl::NoVcs => { @@ -747,10 +745,10 @@ fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<() Ok(()) } -fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { +fn mk(gctx: &GlobalContext, opts: &MkOptions<'_>) -> CargoResult<()> { let path = opts.path; let name = opts.name; - let cfg = config.get::("cargo-new")?; + let cfg = gctx.get::("cargo-new")?; // Using the push method with multiple arguments ensures that the entries // for all mutually-incompatible VCS in terms of syntax are in sync. @@ -758,7 +756,7 @@ fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { ignore.push("/target", "^target$", "target"); let vcs = opts.version_control.unwrap_or_else(|| { - let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); + let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), gctx.cwd()); match (cfg.version_control, in_existing_vcs) { (None, false) => VersionControl::Git, (Some(opt), false) => opt, @@ -766,7 +764,7 @@ fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { } }); - init_vcs(path, vcs, config)?; + init_vcs(path, vcs, gctx)?; write_ignore_file(path, &ignore, vcs)?; // Create `Cargo.toml` file with necessary `[lib]` and `[[bin]]` sections, if needed. 
@@ -849,7 +847,7 @@ fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { &mut workspace_document, &display_path, )? { - config.shell().status( + gctx.shell().status( "Adding", format!( "`{}` as member of workspace at `{}`", @@ -914,16 +912,16 @@ mod tests { } } - if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { + if let Err(e) = Workspace::new(&path.join("Cargo.toml"), gctx) { crate::display_warning_with_error( "compiling this new package may not work due to invalid \ workspace configuration", &e, - &mut config.shell(), + &mut gctx.shell(), ); } - config.shell().note( + gctx.shell().note( "see more `Cargo.toml` keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html", )?; diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 3ae0d35779e..408be75faf3 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -130,7 +130,7 @@ fn build_resolve_graph( // TODO: Without --filter-platform, features are being resolved for `host` only. // How should this work? let requested_kinds = - CompileKind::from_requested_targets(ws.config(), &metadata_opts.filter_platforms)?; + CompileKind::from_requested_targets(ws.gctx(), &metadata_opts.filter_platforms)?; let mut target_data = RustcTargetData::new(ws, &requested_kinds)?; // Resolve entire workspace. 
let specs = Packages::All.to_package_id_specs(ws)?; diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 7dcf295289c..243b37e52e6 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -17,7 +17,7 @@ use crate::util::cache_lock::CacheLockMode; use crate::util::config::JobsConfig; use crate::util::errors::CargoResult; use crate::util::toml::{prepare_for_publish, to_real_manifest}; -use crate::util::{self, human_readable_bytes, restricted_names, Config, FileLock}; +use crate::util::{self, human_readable_bytes, restricted_names, FileLock, GlobalContext}; use crate::{drop_println, ops}; use anyhow::Context as _; use cargo_util::paths; @@ -28,8 +28,8 @@ use tar::{Archive, Builder, EntryType, Header, HeaderMode}; use tracing::debug; use unicase::Ascii as UncasedAscii; -pub struct PackageOpts<'cfg> { - pub config: &'cfg Config, +pub struct PackageOpts<'gctx> { + pub gctx: &'gctx GlobalContext, pub list: bool, pub check_metadata: bool, pub allow_dirty: bool, @@ -87,16 +87,16 @@ pub fn package_one( pkg: &Package, opts: &PackageOpts<'_>, ) -> CargoResult> { - let config = ws.config(); - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config); + let gctx = ws.gctx(); + let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx); src.update()?; if opts.check_metadata { - check_metadata(pkg, config)?; + check_metadata(pkg, gctx)?; } if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() { - config.shell().warn( + gctx.shell().warn( "both package.include and package.exclude are specified; \ the exclude list will be ignored", )?; @@ -107,7 +107,7 @@ pub fn package_one( // dirty. let vcs_info = if !opts.allow_dirty { // This will error if a dirty repo is found. - check_repo_state(pkg, &src_files, config)? + check_repo_state(pkg, &src_files, gctx)? 
} else { None }; @@ -118,7 +118,7 @@ pub fn package_one( if opts.list { for ar_file in ar_files { - drop_println!(config, "{}", ar_file.rel_str); + drop_println!(gctx, "{}", ar_file.rel_str); } return Ok(None); @@ -133,15 +133,14 @@ pub fn package_one( let dir = ws.target_dir().join("package"); let mut dst = { let tmp = format!(".{}", filename); - dir.open_rw_exclusive_create(&tmp, config, "package scratch space")? + dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")? }; // Package up and test a temporary tarball and only move it to the final // location if it actually passes all our tests. Any previously existing // tarball can be assumed as corrupt or invalid, so we just blow it away if // it exists. - config - .shell() + gctx.shell() .status("Packaging", pkg.package_id().to_string())?; dst.file().set_len(0)?; let uncompressed_size = tar(ws, pkg, ar_files, dst.file(), &filename) @@ -171,7 +170,7 @@ pub fn package_one( filecount, uncompressed.0, uncompressed.1, compressed.0, compressed.1, ); // It doesn't really matter if this fails. - drop(config.shell().status("Packaged", message)); + drop(gctx.shell().status("Packaged", message)); return Ok(Some(dst)); } @@ -196,7 +195,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult