diff --git a/.cargo/config b/.cargo/config index fec10deea..29d762df1 100644 --- a/.cargo/config +++ b/.cargo/config @@ -75,4 +75,4 @@ rustflags = [ "-Wfuture_incompatible", "-Wnonstandard_style", "-Wrust_2018_idioms", -] +] \ No newline at end of file diff --git a/.github/workflows/python-bindings.yml b/.github/workflows/python-bindings.yml index 84dcd939f..38dc1be0f 100644 --- a/.github/workflows/python-bindings.yml +++ b/.github/workflows/python-bindings.yml @@ -35,7 +35,7 @@ jobs: lfs: true - uses: prefix-dev/setup-pixi@v0.6.0 with: - pixi-version: v0.13.0 + pixi-version: v0.20.1 cache: true manifest-path: py-rattler/pixi.toml - uses: actions-rust-lang/setup-rust-toolchain@v1 @@ -44,9 +44,9 @@ jobs: - name: Format and Lint run: | cd py-rattler - pixi run lint - pixi run fmt-check + pixi run -e test lint + pixi run -e test fmt-check - name: Run tests run: | cd py-rattler - pixi run test + pixi run -e test test diff --git a/.github/workflows/rust-compile.yml b/.github/workflows/rust-compile.yml index 8cf5468cf..982f70026 100644 --- a/.github/workflows/rust-compile.yml +++ b/.github/workflows/rust-compile.yml @@ -22,7 +22,7 @@ env: RUST_BACKTRACE: 1 RUSTFLAGS: "-D warnings" CARGO_TERM_COLOR: always - DEFAULT_FEATURES: tokio,serde,reqwest,sparse,sysinfo,resolvo + DEFAULT_FEATURES: tokio,serde,reqwest,sparse,sysinfo,resolvo,gateway jobs: check-rustdoc-links: diff --git a/.gitignore b/.gitignore index ae387afa6..1e6de59ee 100644 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,6 @@ Cargo.lock # pixi .pixi/ pixi.lock + +# Visual studio files +.vs/ diff --git a/Cargo.toml b/Cargo.toml index b293cf319..c3a168951 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -53,6 +53,7 @@ clap = { version = "4.5.4", features = ["derive"] } cmake = "0.1.50" console = { version = "0.15.8", features = ["windows-console-colors"] } criterion = "0.5" +dashmap = "5.5.3" difference = "2.0.0" digest = "0.10.7" dirs = "5.0.1" @@ -63,12 +64,14 @@ fslock = "0.2.1" futures = "0.3.30" futures-util = "0.3.30" fxhash = "0.2.1" +generic-array = "0.14.4" getrandom = { version = "0.2.14", default-features = false } glob = "0.3.1" -google-cloud-auth = { version = "0.13.2", default-features = false} +google-cloud-auth = { version = "0.13.2", default-features = false } hex = "0.4.3" hex-literal = "0.4.1" http = "1.1" +http-cache-semantics = "2.1.0" humansize = "2.1.3" humantime = "2.1.0" indexmap = "2.2.6" @@ -90,6 +93,7 @@ nom = "7.1.3" num_cpus = "1.16.0" once_cell = "1.19.0" ouroboros = "0.18.3" +parking_lot = "0.12.1" pathdiff = "0.2.1" pep440_rs = { version = "0.5.0" } pep508_rs = { version = "0.4.2" } @@ -106,6 +110,7 @@ reqwest-middleware = "0.3.0" reqwest-retry = "0.5.0" resolvo = { version = "0.4.0" } retry-policies = { version = "0.3.0", default-features = false } +rmp-serde = { version = "1.2.0" } rstest = { version = "0.19.0" } rstest_reuse = "0.6.0" serde = { version = "1.0.198" } @@ -141,6 +146,7 @@ tracing = "0.1.40" tracing-subscriber = { version = "0.3.18", default-features = false } tracing-test = { version = "0.2.4" } trybuild = { version = "1.0.91" } +typed-path = { version = "0.8.0" } url = { version = "2.5.0" } uuid = { version = "1.8.0", default-features = false } walkdir = "2.5.0" @@ -149,3 +155,6 @@ zip = { version = "0.6.6", default-features = false } zstd = { version = "0.13.1", default-features = false } [patch.crates-io] + +[profile.release] +debug = true diff --git a/crates/file_url/.gitignore b/crates/file_url/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ 
b/crates/file_url/.gitignore @@ -0,0 +1 @@ +/target diff --git a/crates/file_url/Cargo.toml b/crates/file_url/Cargo.toml new file mode 100644 index 000000000..e5ea097a0 --- /dev/null +++ b/crates/file_url/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "file_url" +version = "0.1.0" +edition.workspace = true +authors = ["Bas Zalmstra "] +description = "Helper functions to work with file:// urls" +categories.workspace = true +repository.workspace = true +license.workspace = true + +[dependencies] +url = { workspace = true } +percent-encoding = { workspace = true } +itertools = { workspace = true } +typed-path = { workspace = true } + +[dev-dependencies] +rstest = { workspace = true } diff --git a/crates/file_url/src/lib.rs b/crates/file_url/src/lib.rs new file mode 100644 index 000000000..0026b2070 --- /dev/null +++ b/crates/file_url/src/lib.rs @@ -0,0 +1,214 @@ +//! The URL crate parses `file://` URLs differently on Windows and other operating systems. +//! This crate provides functionality that tries to parse a `file://` URL as a path on all operating +//! systems. This is useful when you want to convert a `file://` URL to a path and vice versa. + +use itertools::Itertools; +use percent_encoding::{percent_decode, percent_encode, AsciiSet, CONTROLS}; +use std::fmt::Write; +use std::path::PathBuf; +use std::str::FromStr; +use typed_path::{ + Utf8TypedComponent, Utf8TypedPath, Utf8UnixComponent, Utf8WindowsComponent, Utf8WindowsPrefix, +}; +use url::{Host, Url}; + +/// Returns the drive letter path (e.g. `C:\`) if the specified segment is considered to be a Windows drive letter segment. +/// E.g. the segment `C:` or `C%3A` would be considered a drive letter segment. +fn is_windows_drive_letter_segment(segment: &str) -> Option<String> { + // Segment is a simple drive letter: X: + if let Some((drive_letter, ':')) = segment.chars().collect_tuple() { + if drive_letter.is_ascii_alphabetic() { + return Some(format!("{drive_letter}:\\")); + } + } + + // Segment is a simple drive letter but the colon is percent escaped: E.g. X%3A + if let Some((drive_letter, '%', '3', 'a' | 'A')) = segment.chars().collect_tuple() { + if drive_letter.is_ascii_alphabetic() { + return Some(format!("{drive_letter}:\\")); + } + } + + None +} + +/// Tries to convert a `file://` based URL to a path. +/// +/// We assume that any passed URL that represents a path is an absolute path. +/// +/// [`Url::to_file_path`] has a different code path for Windows and other operating systems, this +/// can cause URLs to parse perfectly fine on Windows, but fail to parse on Linux. This function +/// tries to parse the URL as a path on all operating systems.
+pub fn url_to_path(url: &Url) -> Option<PathBuf> { + if url.scheme() != "file" { + return None; + } + + let mut segments = url.path_segments()?; + let host = match url.host() { + None | Some(Host::Domain("localhost")) => None, + Some(host) => Some(host), + }; + + let (mut path, separator) = if let Some(host) = host { + // A host is only present for Windows UNC paths + (format!("\\\\{host}\\"), "\\") + } else { + let first = segments.next()?; + if first.starts_with('.') { + // Relative file paths are not supported + return None; + } + + match is_windows_drive_letter_segment(first) { + Some(drive_letter) => (drive_letter, "\\"), + None => (format!("/{first}/"), "/"), + } + }; + + for (idx, segment) in segments.enumerate() { + if idx > 0 { + path.push_str(separator); + } + match String::from_utf8(percent_decode(segment.as_bytes()).collect()) { + Ok(s) => path.push_str(&s), + _ => return None, + } + } + + Some(PathBuf::from(path)) +} + +const FRAGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`'); +const PATH: &AsciiSet = &FRAGMENT.add(b'#').add(b'?').add(b'{').add(b'}'); +pub(crate) const PATH_SEGMENT: &AsciiSet = &PATH.add(b'/').add(b'%'); + +/// Whether the scheme is file:, the path has a single segment, and that segment +/// is a Windows drive letter +#[inline] +pub fn is_windows_drive_letter(segment: &str) -> bool { + segment.len() == 2 && starts_with_windows_drive_letter(segment) +} + +fn starts_with_windows_drive_letter(s: &str) -> bool { + s.len() >= 2 + && (s.as_bytes()[0] as char).is_ascii_alphabetic() + && matches!(s.as_bytes()[1], b':' | b'|') + && (s.len() == 2 || matches!(s.as_bytes()[2], b'/' | b'\\' | b'?' | b'#')) +} + +fn path_to_url<'a>(path: impl Into<Utf8TypedPath<'a>>) -> Result<String, NotAnAbsolutePath> { + let path = path.into(); + let mut components = path.components(); + + let mut result = String::from("file://"); + let host_start = result.len() + 1; + + let root = components.next(); + match root { + Some(Utf8TypedComponent::Windows(Utf8WindowsComponent::Prefix(ref p))) => match p.kind() { + Utf8WindowsPrefix::Disk(letter) | Utf8WindowsPrefix::VerbatimDisk(letter) => { + result.push('/'); + result.push(letter); + result.push(':'); + } + Utf8WindowsPrefix::UNC(server, share) + | Utf8WindowsPrefix::VerbatimUNC(server, share) => { + let host = Host::parse(server).map_err(|_err| NotAnAbsolutePath)?; + write!(result, "{host}").unwrap(); + result.push('/'); + result.extend(percent_encode(share.as_bytes(), PATH_SEGMENT)); + } + _ => return Err(NotAnAbsolutePath), + }, + Some(Utf8TypedComponent::Unix(Utf8UnixComponent::RootDir)) => {} + _ => return Err(NotAnAbsolutePath), + } + + let mut path_only_has_prefix = true; + for component in components { + if matches!( + component, + Utf8TypedComponent::Windows(Utf8WindowsComponent::RootDir) + | Utf8TypedComponent::Unix(Utf8UnixComponent::RootDir) + ) { + continue; + } + + path_only_has_prefix = false; + let component = component.as_str(); + + result.push('/'); + result.extend(percent_encode(component.as_bytes(), PATH_SEGMENT)); + } + + // A windows drive letter must end with a slash.
+ if result.len() > host_start + && is_windows_drive_letter(&result[host_start..]) + && path_only_has_prefix + { + result.push('/'); + } + + Ok(result) +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct NotAnAbsolutePath; + +pub fn file_path_to_url<'a>(path: impl Into<Utf8TypedPath<'a>>) -> Result<Url, NotAnAbsolutePath> { + let url = path_to_url(path)?; + Ok(Url::from_str(&url).expect("url string must be a valid url")) +} + +pub fn directory_path_to_url<'a>( + path: impl Into<Utf8TypedPath<'a>>, +) -> Result<Url, NotAnAbsolutePath> { + let mut url = path_to_url(path)?; + if !url.ends_with('/') { + url.push('/'); + } + Ok(Url::from_str(&url).expect("url string must be a valid url")) +} + +#[cfg(test)] +mod tests { + use rstest::rstest; + use std::path::PathBuf; + use url::Url; + + #[rstest] + #[case("file:///home/bob/test-file.txt", Some("/home/bob/test-file.txt"))] + #[case("file:///C:/Test/Foo.txt", Some("C:\\Test\\Foo.txt"))] + #[case("file:///c:/temp/test-file.txt", Some("c:\\temp\\test-file.txt"))] + #[case("file:///c:\\temp\\test-file.txt", Some("c:\\temp\\test-file.txt"))] + // Percent encoding + #[case("file:///foo/ba%20r", Some("/foo/ba r"))] + #[case("file:///C%3A/Test/Foo.txt", Some("C:\\Test\\Foo.txt"))] + // Non file URLs + #[case("http://example.com", None)] + fn test_url_to_path(#[case] url: &str, #[case] expected: Option<&str>) { + let url = url.parse::<Url>().unwrap(); + let expected = expected.map(PathBuf::from); + assert_eq!(super::url_to_path(&url), expected); + } + + #[rstest] + #[case::win_drive("C:/", Some("file:///C:/"))] + #[case::unix_path("/root", Some("file:///root"))] + #[case::not_absolute("root", None)] + #[case::win_share("//servername/path", Some("file://servername/path"))] + #[case::dos_device_path("\\\\?\\C:\\Test\\Foo.txt", Some("file:///C:/Test/Foo.txt"))] + #[case::unsupported_guid_volumes( + "\\\\.\\Volume{b75e2c83-0000-0000-0000-602f00000000}\\Test\\Foo.txt", + None + )] + #[case::percent_encoding("//foo/ba r", Some("file://foo/ba%20r"))] + fn test_file_path_to_url(#[case] path: &str, #[case] expected: Option<&str>) { + let expected = expected.map(|s| s.to_string()); + assert_eq!( + super::file_path_to_url(path).map(|u| u.to_string()).ok(), + expected + ); + } +} diff --git a/crates/rattler-bin/Cargo.toml b/crates/rattler-bin/Cargo.toml index 5492d44ae..f801744ff 100644 --- a/crates/rattler-bin/Cargo.toml +++ b/crates/rattler-bin/Cargo.toml @@ -31,13 +31,14 @@ once_cell = { workspace = true } rattler = { path="../rattler", version = "0.24.0", default-features = false } rattler_conda_types = { path="../rattler_conda_types", version = "0.22.1", default-features = false } rattler_networking = { path="../rattler_networking", version = "0.20.5", default-features = false } -rattler_repodata_gateway = { path="../rattler_repodata_gateway", version = "0.19.11", default-features = false, features = ["sparse"] } +rattler_repodata_gateway = { path="../rattler_repodata_gateway", version = "0.19.11", default-features = false, features = ["gateway"] } rattler_solve = { path="../rattler_solve", version = "0.21.1", default-features = false, features = ["resolvo", "libsolv_c"] } rattler_virtual_packages = { path="../rattler_virtual_packages", version = "0.19.9", default-features = false } reqwest = { workspace = true } reqwest-middleware = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] } +itertools = { workspace = true } [package.metadata.release] # Don't publish the binary diff --git a/crates/rattler-bin/src/commands/create.rs
b/crates/rattler-bin/src/commands/create.rs index 51d12ece9..4a757538f 100644 --- a/crates/rattler-bin/src/commands/create.rs +++ b/crates/rattler-bin/src/commands/create.rs @@ -1,7 +1,8 @@ use crate::global_multi_progress; use anyhow::Context; use futures::{stream, stream::FuturesUnordered, FutureExt, StreamExt, TryFutureExt, TryStreamExt}; -use indicatif::{HumanBytes, ProgressBar, ProgressState, ProgressStyle}; +use indicatif::{ProgressBar, ProgressStyle}; +use itertools::Itertools; use rattler::{ default_cache_dir, install::{ @@ -18,20 +19,18 @@ use rattler_conda_types::{ use rattler_networking::{ retry_policies::default_retry_policy, AuthenticationMiddleware, AuthenticationStorage, }; -use rattler_repodata_gateway::fetch::{ - CacheResult, DownloadProgress, FetchRepoDataError, FetchRepoDataOptions, -}; -use rattler_repodata_gateway::sparse::SparseRepoData; +use rattler_repodata_gateway::{Gateway, RepoData}; use rattler_solve::{ libsolv_c::{self}, - resolvo, ChannelPriority, SolverImpl, SolverTask, + resolvo, ChannelPriority, RepoDataIter, SolverImpl, SolverTask, }; use reqwest::Client; +use std::future::IntoFuture; use std::sync::Arc; +use std::time::Instant; use std::{ borrow::Cow, env, - fmt::Write, future::ready, path::{Path, PathBuf}, str::FromStr, @@ -72,7 +71,7 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { let target_prefix = opt .target_prefix .unwrap_or_else(|| current_dir.join(".prefix")); - println!("target prefix: {target_prefix:?}"); + println!("Target prefix: {}", target_prefix.display()); // Determine the platform we're going to install for let install_platform = if let Some(platform) = opt.platform { @@ -81,7 +80,7 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { Platform::current() }; - println!("installing for platform: {install_platform:?}"); + println!("Installing for platform: {install_platform:?}"); // Parse the specs from the command line. We do this explicitly instead of allowing clap to deal // with this because we need to parse the `channel_config` when parsing matchspecs. @@ -105,19 +104,6 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { .map(|channel_str| Channel::from_str(channel_str, &channel_config)) .collect::<Result<Vec<_>, _>>()?; - // Each channel contains multiple subdirectories. Users can specify the subdirectories they want - // to use when specifying their channels. If the user didn't specify the default subdirectories - // we use defaults based on the current platform. - let channel_urls = channels - .iter() - .flat_map(|channel| { - vec![ - (channel.clone(), install_platform), - (channel.clone(), Platform::NoArch), - ] - }) - .collect::<Vec<_>>(); - // Determine the packages that are currently installed in the environment.
let installed_packages = find_installed_packages(&target_prefix, 100) .await @@ -138,54 +124,33 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { ))) .build(); - let multi_progress = global_multi_progress(); - - let repodata_cache_path = cache_dir.join("repodata"); - let channel_and_platform_len = channel_urls.len(); - let repodata_download_client = download_client.clone(); - let sparse_repo_datas = futures::stream::iter(channel_urls) - .map(move |(channel, platform)| { - let repodata_cache = repodata_cache_path.clone(); - let download_client = repodata_download_client.clone(); - let multi_progress = multi_progress.clone(); - async move { - fetch_repo_data_records_with_progress( - channel, - platform, - &repodata_cache, - download_client.clone(), - multi_progress, - ) - .await - } - }) - .buffer_unordered(channel_and_platform_len) - .filter_map(|result| async move { - match result { - Err(e) => Some(Err(e)), - Ok(Some(data)) => Some(Ok(data)), - Ok(None) => None, - } - }) - .collect::<Vec<_>>() - .await - // Collect into another iterator where we extract the first erroneous result - .into_iter() - .collect::<Result<Vec<_>, _>>()?; - // Get the package names from the matchspecs so we can only load the package records that we need. - let package_names = specs.iter().filter_map(|spec| spec.name.as_ref().cloned()); - let repodatas = wrap_in_progress("parsing repodata", move || { - SparseRepoData::load_records_recursive( - &sparse_repo_datas, - package_names, - Some(|record| { - if record.name.as_normalized() == "python" { - record.depends.push("pip".to_string()); - } - }), - ) - })?; + let gateway = Gateway::builder() + .with_cache_dir(cache_dir.join("repodata")) + .with_client(download_client.clone()) + .finish(); + + let start_load_repo_data = Instant::now(); + let repo_data = wrap_in_async_progress( + "loading repodata", + gateway + .query( + channels, + [install_platform, Platform::NoArch], + specs.clone(), + ) + .recursive(true), + ) + .await + .context("failed to load repodata")?; + + // Determine the number of records + let total_records: usize = repo_data.iter().map(RepoData::len).sum(); + println!( + "Loaded {} records in {:?}", + total_records, + start_load_repo_data.elapsed() + ); // Determine virtual packages of the system. These packages define the capabilities of the // system. Some packages depend on these virtual packages to indicate compatibility with the @@ -218,7 +183,12 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { } })?; - println!("virtual packages: {virtual_packages:?}"); + println!( + "Virtual packages:\n{}\n", + virtual_packages + .iter() + .format_with("\n", |i, f| f(&format_args!(" - {i}",)),) + ); // Now that we parsed and downloaded all information, construct the packaging problem that we // need to solve. We do this by constructing a `SolverProblem`.
This encapsulates all the @@ -229,7 +199,7 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { .collect(); let solver_task = SolverTask { - available_packages: &repodatas, + available_packages: repo_data.iter().map(RepoDataIter).collect::<Vec<_>>(), locked_packages, virtual_packages, specs, @@ -274,19 +244,30 @@ pub async fn create(opt: Opt) -> anyhow::Result<()> { for operation in &transaction.operations { match operation { - TransactionOperation::Install(r) => println!("* Install: {}", format_record(r)), + TransactionOperation::Install(r) => { + println!("{} {}", console::style("+").green(), format_record(r)); + } TransactionOperation::Change { old, new } => { println!( - "* Change: {} -> {}", + "{} {} -> {}", + console::style("~").yellow(), format_record(&old.repodata_record), format_record(new) ); } TransactionOperation::Reinstall(r) => { - println!("* Reinstall: {}", format_record(&r.repodata_record)); + println!( + "{} {}", + console::style("~").yellow(), + format_record(&r.repodata_record) + ); } TransactionOperation::Remove(r) => { - println!("* Remove: {}", format_record(&r.repodata_record)); + println!( + "{} {}", + console::style("-").red(), + format_record(&r.repodata_record) + ); } } } @@ -571,127 +552,141 @@ fn wrap_in_progress<T, F: FnOnce() -> T>(msg: impl Into<Cow<'static, str>>, func result } -/// Given a channel and platform, download and cache the `repodata.json` for it. This function -/// reports its progress via a CLI progressbar. -async fn fetch_repo_data_records_with_progress( - channel: Channel, - platform: Platform, - repodata_cache: &Path, - client: reqwest_middleware::ClientWithMiddleware, - multi_progress: indicatif::MultiProgress, -) -> Result<Option<SparseRepoData>, anyhow::Error> { - // Create a progress bar - let progress_bar = multi_progress.add( - indicatif::ProgressBar::new(1) - .with_finish(indicatif::ProgressFinish::AndLeave) - .with_prefix(format!("{}/{platform}", friendly_channel_name(&channel))) - .with_style(default_bytes_style()), - ); - progress_bar.enable_steady_tick(Duration::from_millis(100)); - - // Download the repodata.json - let download_progress_progress_bar = progress_bar.clone(); - let result = rattler_repodata_gateway::fetch::fetch_repo_data( - channel.platform_url(platform), - client, - repodata_cache.to_path_buf(), - FetchRepoDataOptions::default(), - Some(Box::new(move |DownloadProgress { total, bytes }| { - download_progress_progress_bar.set_length(total.unwrap_or(bytes)); - download_progress_progress_bar.set_position(bytes); - })), - ) - .await; - - // Error out if an error occurred, but also update the progress bar - let result = match result { - Err(e) => { - let not_found = matches!(&e, FetchRepoDataError::NotFound(_)); - if not_found && platform != Platform::NoArch { - progress_bar.set_style(finished_progress_style()); - progress_bar.finish_with_message("Not Found"); - return Ok(None); - } - - progress_bar.set_style(errored_progress_style()); - progress_bar.finish_with_message("Error"); - return Err(e.into()); - } - Ok(result) => result, - }; - - // Notify that we are deserializing - progress_bar.set_style(deserializing_progress_style()); - progress_bar.set_message("Deserializing.."); - - // Deserialize the data. This is a hefty blocking operation so we spawn it as a tokio blocking - // task.
- let repo_data_json_path = result.repo_data_json_path.clone(); - match tokio::task::spawn_blocking(move || { - SparseRepoData::new( - channel, - platform.to_string(), - repo_data_json_path, - Some(|record: &mut PackageRecord| { - if record.name.as_normalized() == "python" { - record.depends.push("pip".to_string()); - } - }), - ) - }) - .await - { - Ok(Ok(repodata)) => { - progress_bar.set_style(finished_progress_style()); - let is_cache_hit = matches!( - result.cache_result, - CacheResult::CacheHit | CacheResult::CacheHitAfterFetch - ); - progress_bar.finish_with_message(if is_cache_hit { "Using cache" } else { "Done" }); - Ok(Some(repodata)) - } - Ok(Err(err)) => { - progress_bar.set_style(errored_progress_style()); - progress_bar.finish_with_message("Error"); - Err(err.into()) - } - Err(err) => { - if let Ok(panic) = err.try_into_panic() { - std::panic::resume_unwind(panic); - } else { - progress_bar.set_style(errored_progress_style()); - progress_bar.finish_with_message("Cancelled.."); - // Since the task was cancelled most likely the whole async stack is being cancelled. - Err(anyhow::anyhow!("cancelled")) - } - } - } -} - -/// Returns a friendly name for the specified channel. -fn friendly_channel_name(channel: &Channel) -> String { - channel - .name - .as_ref() - .map_or_else(|| channel.canonical_name(), String::from) -} - -/// Returns the style to use for a progressbar that is currently in progress. -fn default_bytes_style() -> indicatif::ProgressStyle { - indicatif::ProgressStyle::default_bar() - .template("{spinner:.green} {prefix:20!} [{elapsed_precise}] [{bar:40!.bright.yellow/dim.white}] {bytes:>8} @ {smoothed_bytes_per_sec:8}").unwrap() - .progress_chars("━━╾─") - .with_key( - "smoothed_bytes_per_sec", - |s: &ProgressState, w: &mut dyn Write| match (s.pos(), s.elapsed().as_millis()) { - (pos, elapsed_ms) if elapsed_ms > 0 => { - write!(w, "{}/s", HumanBytes((pos as f64 * 1000_f64 / elapsed_ms as f64) as u64)).unwrap(); - } - _ => write!(w, "-").unwrap(), - }, - ) +/// Displays a spinner with the given message while running the specified function to completion. +async fn wrap_in_async_progress<T, F: IntoFuture<Output = T>>( + msg: impl Into<Cow<'static, str>>, + fut: F, +) -> T { + let pb = ProgressBar::new_spinner(); + pb.enable_steady_tick(Duration::from_millis(100)); + pb.set_style(long_running_progress_style()); + pb.set_message(msg); + let result = fut.into_future().await; + pb.finish_and_clear(); + result } - +// +// /// Given a channel and platform, download and cache the `repodata.json` for it. This function +// /// reports its progress via a CLI progressbar.
+// async fn fetch_repo_data_records_with_progress( +// channel: Channel, +// platform: Platform, +// repodata_cache: &Path, +// client: reqwest_middleware::ClientWithMiddleware, +// multi_progress: indicatif::MultiProgress, +// ) -> Result<Option<SparseRepoData>, anyhow::Error> { +// // Create a progress bar +// let progress_bar = multi_progress.add( +// indicatif::ProgressBar::new(1) +// .with_finish(indicatif::ProgressFinish::AndLeave) +// .with_prefix(format!("{}/{platform}", friendly_channel_name(&channel))) +// .with_style(default_bytes_style()), +// ); +// progress_bar.enable_steady_tick(Duration::from_millis(100)); +// +// // Download the repodata.json +// let download_progress_progress_bar = progress_bar.clone(); +// let result = rattler_repodata_gateway::fetch::fetch_repo_data( +// channel.platform_url(platform), +// client, +// repodata_cache.to_path_buf(), +// FetchRepoDataOptions::default(), +// Some(Box::new(move |DownloadProgress { total, bytes }| { +// download_progress_progress_bar.set_length(total.unwrap_or(bytes)); +// download_progress_progress_bar.set_position(bytes); +// })), +// ) +// .await; +// +// // Error out if an error occurred, but also update the progress bar +// let result = match result { +// Err(e) => { +// let not_found = matches!(&e, FetchRepoDataError::NotFound(_)); +// if not_found && platform != Platform::NoArch { +// progress_bar.set_style(finished_progress_style()); +// progress_bar.finish_with_message("Not Found"); +// return Ok(None); +// } +// +// progress_bar.set_style(errored_progress_style()); +// progress_bar.finish_with_message("Error"); +// return Err(e.into()); +// } +// Ok(result) => result, +// }; +// +// // Notify that we are deserializing +// progress_bar.set_style(deserializing_progress_style()); +// progress_bar.set_message("Deserializing.."); +// +// // Deserialize the data. This is a hefty blocking operation so we spawn it as a tokio blocking +// // task. +// let repo_data_json_path = result.repo_data_json_path.clone(); +// match tokio::task::spawn_blocking(move || { +// SparseRepoData::new( +// channel, +// platform.to_string(), +// repo_data_json_path, +// Some(|record: &mut PackageRecord| { +// if record.name.as_normalized() == "python" { +// record.depends.push("pip".to_string()); +// } +// }), +// ) +// }) +// .await +// { +// Ok(Ok(repodata)) => { +// progress_bar.set_style(finished_progress_style()); +// let is_cache_hit = matches!( +// result.cache_result, +// CacheResult::CacheHit | CacheResult::CacheHitAfterFetch +// ); +// progress_bar.finish_with_message(if is_cache_hit { "Using cache" } else { "Done" }); +// Ok(Some(repodata)) +// } +// Ok(Err(err)) => { +// progress_bar.set_style(errored_progress_style()); +// progress_bar.finish_with_message("Error"); +// Err(err.into()) +// } +// Err(err) => { +// if let Ok(panic) = err.try_into_panic() { +// std::panic::resume_unwind(panic); +// } else { +// progress_bar.set_style(errored_progress_style()); +// progress_bar.finish_with_message("Cancelled.."); +// // Since the task was cancelled most likely the whole async stack is being cancelled. +// Err(anyhow::anyhow!("cancelled")) +// } +// } +// } +// } + +// /// Returns a friendly name for the specified channel. +// fn friendly_channel_name(channel: &Channel) -> String { +// channel +// .name +// .as_ref() +// .map_or_else(|| channel.canonical_name(), String::from) +// } +// +// /// Returns the style to use for a progressbar that is currently in progress.
+// fn default_bytes_style() -> indicatif::ProgressStyle { +// indicatif::ProgressStyle::default_bar() +// .template("{spinner:.green} {prefix:20!} [{elapsed_precise}] [{bar:40!.bright.yellow/dim.white}] {bytes:>8} @ {smoothed_bytes_per_sec:8}").unwrap() +// .progress_chars("━━╾─") +// .with_key( +// "smoothed_bytes_per_sec", +// |s: &ProgressState, w: &mut dyn Write| match (s.pos(), s.elapsed().as_millis()) { +// (pos, elapsed_ms) if elapsed_ms > 0 => { +// write!(w, "{}/s", HumanBytes((pos as f64 * 1000_f64 / elapsed_ms as f64) as u64)).unwrap(); +// } +// _ => write!(w, "-").unwrap(), +// }, +// ) +// } +// /// Returns the style to use for a progressbar that is currently in progress. fn default_progress_style() -> indicatif::ProgressStyle { indicatif::ProgressStyle::default_bar() @@ -699,13 +694,13 @@ fn default_progress_style() -> indicatif::ProgressStyle { .progress_chars("━━╾─") } -/// Returns the style to use for a progressbar that is in Deserializing state. -fn deserializing_progress_style() -> indicatif::ProgressStyle { - indicatif::ProgressStyle::default_bar() - .template("{spinner:.green} {prefix:20!} [{elapsed_precise}] {wide_msg}") - .unwrap() - .progress_chars("━━╾─") -} +// /// Returns the style to use for a progressbar that is in Deserializing state. +// fn deserializing_progress_style() -> indicatif::ProgressStyle { +// indicatif::ProgressStyle::default_bar() +// .template("{spinner:.green} {prefix:20!} [{elapsed_precise}] {wide_msg}") +// .unwrap() +// .progress_chars("━━╾─") +// } /// Returns the style to use for a progressbar that is finished. fn finished_progress_style() -> indicatif::ProgressStyle { @@ -718,16 +713,16 @@ fn finished_progress_style() -> indicatif::ProgressStyle { .progress_chars("━━╾─") } -/// Returns the style to use for a progressbar that is in error state. -fn errored_progress_style() -> indicatif::ProgressStyle { - indicatif::ProgressStyle::default_bar() - .template(&format!( - "{} {{prefix:20!}} [{{elapsed_precise}}] {{msg:.bold.red}}", - console::style(console::Emoji("❌", " ")).red() - )) - .unwrap() - .progress_chars("━━╾─") -} +// /// Returns the style to use for a progressbar that is in error state. +// fn errored_progress_style() -> indicatif::ProgressStyle { +// indicatif::ProgressStyle::default_bar() +// .template(&format!( +// "{} {{prefix:20!}} [{{elapsed_precise}}] {{msg:.bold.red}}", +// console::style(console::Emoji("❌", " ")).red() +// )) +// .unwrap() +// .progress_chars("━━╾─") +// } /// Returns the style to use for a progressbar that is indeterminate and simply shows a spinner. 
fn long_running_progress_style() -> indicatif::ProgressStyle { diff --git a/crates/rattler_conda_types/Cargo.toml b/crates/rattler_conda_types/Cargo.toml index 7b26e012d..410d4c373 100644 --- a/crates/rattler_conda_types/Cargo.toml +++ b/crates/rattler_conda_types/Cargo.toml @@ -12,6 +12,7 @@ readme.workspace = true [dependencies] chrono = { workspace = true } +file_url = { path = "../file_url", version = "0.1.0" } fxhash = { workspace = true } glob = { workspace = true } hex = { workspace = true } @@ -30,6 +31,7 @@ smallvec = { workspace = true, features = ["serde", "const_new", "const_generics"] } strum = { workspace = true, features = ["derive"] } thiserror = { workspace = true } tracing = { workspace = true } +typed-path = { workspace = true } url = { workspace = true, features = ["serde"] } [dev-dependencies] diff --git a/crates/rattler_conda_types/src/channel/mod.rs b/crates/rattler_conda_types/src/channel/mod.rs index 28b875cb5..497068ca8 100644 --- a/crates/rattler_conda_types/src/channel/mod.rs +++ b/crates/rattler_conda_types/src/channel/mod.rs @@ -1,12 +1,13 @@ use itertools::Itertools; use std::borrow::Cow; use std::fmt::{Display, Formatter}; -use std::path::{Component, Path, PathBuf}; +use std::path::{Path, PathBuf}; use std::str::FromStr; +use file_url::directory_path_to_url; use serde::{Deserialize, Serialize}; -use smallvec::SmallVec; use thiserror::Error; +use typed_path::{Utf8NativePathBuf, Utf8TypedPath, Utf8TypedPathBuf}; use url::Url; use super::{ParsePlatformError, Platform}; @@ -121,7 +122,7 @@ pub struct Channel { /// The platforms supported by this channel, or None if no explicit platforms have been /// specified. #[serde(skip_serializing_if = "Option::is_none")] - pub platforms: Option<SmallVec<[Platform; 2]>>, + pub platforms: Option<Vec<Platform>>, /// Base URL of the channel, everything is relative to this url. pub base_url: Url, @@ -141,18 +142,19 @@ impl Channel { let channel = if parse_scheme(channel).is_some() { let url = Url::parse(channel)?; - Channel::from_url(url, platforms, config) + Channel { + platforms, + ..Channel::from_url(url) + } } else if is_path(channel) { - let path = PathBuf::from(channel); - #[cfg(target_arch = "wasm32")] return Err(ParseChannelError::InvalidPath(path)); #[cfg(not(target_arch = "wasm32"))] { - let absolute_path = absolute_path(&path, &config.root_dir); - let url = Url::from_directory_path(absolute_path) - .map_err(|_err| ParseChannelError::InvalidPath(path))?; + let absolute_path = absolute_path(channel, &config.root_dir)?; + let url = directory_path_to_url(absolute_path.to_path()) + .map_err(|_err| ParseChannelError::InvalidPath(channel.to_owned()))?; Self { platforms, base_url: url, } } } } else { - Channel::from_name(channel, platforms, config) + Channel { + platforms, + ..Channel::from_name(channel, config) + } }; Ok(channel) } + /// Set the explicit platforms of the channel. + pub fn with_explicit_platforms(self, platforms: impl IntoIterator<Item = Platform>) -> Self { + Self { + platforms: Some(platforms.into_iter().collect()), + ..self + } + } + /// Constructs a new [`Channel`] from a `Url` and associated platforms.
- pub fn from_url( - url: Url, - platforms: Option<impl Into<SmallVec<[Platform; 2]>>>, - _config: &ChannelConfig, - ) -> Self { + pub fn from_url(url: Url) -> Self { // Get the path part of the URL but trim the directory suffix let path = url.path().trim_end_matches('/'); @@ -195,7 +204,7 @@ impl Channel { // Case 7: Fallback let name = path.trim_start_matches('/'); Self { - platforms: platforms.map(Into::into), + platforms: None, name: (!name.is_empty()).then_some(name).map(str::to_owned), base_url, } @@ -205,7 +214,7 @@ impl Channel { .rsplit_once('/') .map_or_else(|| base_url.path(), |(_, path_part)| path_part); Self { - platforms: platforms.map(Into::into), + platforms: None, name: (!name.is_empty()).then_some(name).map(str::to_owned), base_url, } @@ -213,11 +222,7 @@ impl Channel { } /// Construct a channel from a name, platform and configuration. - pub fn from_name( - name: &str, - platforms: Option<SmallVec<[Platform; 2]>>, - config: &ChannelConfig, - ) -> Self { + pub fn from_name(name: &str, config: &ChannelConfig) -> Self { // TODO: custom channels let dir_name = if name.ends_with('/') { @@ -228,7 +233,7 @@ impl Channel { let name = name.trim_end_matches('/'); Self { - platforms, + platforms: None, base_url: config .channel_alias .join(dir_name.as_ref()) @@ -237,6 +242,29 @@ impl Channel { } } + /// Constructs a channel from a directory path. + /// + /// # Panics + /// + /// Panics if the path is not an absolute path or could not be canonicalized. + pub fn from_directory(path: &Path) -> Self { + let path = if path.is_absolute() { + Cow::Borrowed(path) + } else { + Cow::Owned( + path.canonicalize() + .expect("path is not a valid absolute path"), + ) + }; + + let url = Url::from_directory_path(path).expect("path is a valid url"); + Self { + platforms: None, + base_url: url, + name: None, + } + } + /// Returns the name of the channel pub fn name(&self) -> &str { match self.base_url().scheme() { @@ -298,8 +326,16 @@ pub enum ParseChannelError { ParseUrlError(#[source] url::ParseError), /// Error when the path is invalid. - #[error("invalid path '{0}")] - InvalidPath(PathBuf), + #[error("invalid path '{0}'")] + InvalidPath(String), + + /// The root directory is not an absolute path + #[error("root directory from channel config is not an absolute path")] + NonAbsoluteRootDir(PathBuf), + + /// The root directory is not UTF-8 encoded. + #[error("root directory of channel config is not utf8 encoded")] + NotUtf8RootDir(PathBuf), } impl From<ParsePlatformError> for ParseChannelError { @@ -316,18 +352,16 @@ impl From<url::ParseError> for ParseChannelError { /// Extract the platforms from the given human readable channel. #[allow(clippy::type_complexity)] -fn parse_platforms( - channel: &str, -) -> Result<(Option<SmallVec<[Platform; 2]>>, &str), ParsePlatformError> { +fn parse_platforms(channel: &str) -> Result<(Option<Vec<Platform>>, &str), ParsePlatformError> { if channel.rfind(']').is_some() { if let Some(start_platform_idx) = channel.find('[') { let platform_part = &channel[start_platform_idx + 1..channel.len() - 1]; - let platforms: SmallVec<_> = platform_part + let platforms = platform_part .split(',') .map(str::trim) .filter(|s| !s.is_empty()) .map(FromStr::from_str) - .collect::<Result<_, _>>()?; + .collect::<Result<Vec<Platform>, _>>()?; let platforms = if platforms.is_empty() { None } else { @@ -394,67 +428,46 @@ fn is_path(path: &str) -> bool { Some((letter, ':', '/' | '\\')) if letter.is_alphabetic()) } -/// Normalizes a file path by eliminating `..` and `.`.
-fn normalize_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) => unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret -} - /// Returns the specified path as an absolute path -fn absolute_path<'a>(path: &'a Path, root_dir: &Path) -> Cow<'a, Path> { +fn absolute_path(path: &str, root_dir: &Path) -> Result<Utf8TypedPathBuf, ParseChannelError> { + let path = Utf8TypedPath::from(path); if path.is_absolute() { - return Cow::Borrowed(path); + return Ok(path.normalize()); + } + + let root_dir_str = root_dir + .to_str() + .ok_or_else(|| ParseChannelError::NotUtf8RootDir(root_dir.to_path_buf()))?; + let native_root_dir = Utf8NativePathBuf::from(root_dir_str); + + if !native_root_dir.is_absolute() { + return Err(ParseChannelError::NonAbsoluteRootDir( + root_dir.to_path_buf(), + )); } - let absolute_dir = root_dir.join(path); - Cow::Owned(normalize_path(&absolute_dir)) + Ok(native_root_dir.to_typed_path().join(path).normalize()) } #[cfg(test)] mod tests { use super::*; - use smallvec::smallvec; - use std::{ - path::{Path, PathBuf}, - str::FromStr, - }; + use std::str::FromStr; use url::Url; #[test] fn test_parse_platforms() { assert_eq!( parse_platforms("[noarch, linux-64]"), - Ok((Some(smallvec![Platform::NoArch, Platform::Linux64]), "")) + Ok((Some(vec![Platform::NoArch, Platform::Linux64]), "")) ); assert_eq!( parse_platforms("sometext[noarch]"), - Ok((Some(smallvec![Platform::NoArch]), "sometext")) + Ok((Some(vec![Platform::NoArch]), "sometext")) ); assert_eq!( parse_platforms("sometext[noarch,]"), - Ok((Some(smallvec![Platform::NoArch]), "sometext")) + Ok((Some(vec![Platform::NoArch]), "sometext")) ); assert_eq!(parse_platforms("sometext[]"), Ok((None, "sometext"))); assert!(matches!( @@ -463,52 +476,28 @@ mod tests { )); } - #[test] - fn test_normalize_path() { - assert_eq!( - normalize_path(Path::new("foo/bar")), - PathBuf::from("foo/bar") - ); - assert_eq!( - normalize_path(Path::new("foo/bar/")), - PathBuf::from("foo/bar/") - ); - assert_eq!( - normalize_path(Path::new("./foo/bar")), - PathBuf::from("foo/bar") - ); - assert_eq!( - normalize_path(Path::new("./foo/../bar")), - PathBuf::from("bar") - ); - assert_eq!( - normalize_path(Path::new("./foo/../bar/..")), - PathBuf::from("") - ); - } - #[test] fn test_absolute_path() { let current_dir = std::env::current_dir().expect("no current dir?"); + let native_current_dir = typed_path::utils::utf8_current_dir() + .expect("") + .to_typed_path_buf(); assert_eq!( - absolute_path(Path::new("."), &current_dir).as_ref(), - &current_dir + absolute_path(".", &current_dir).as_ref(), + Ok(&native_current_dir) ); assert_eq!( - absolute_path(Path::new("foo"), &current_dir).as_ref(), - &current_dir.join("foo") + absolute_path("foo", &current_dir).as_ref(), + Ok(&native_current_dir.join("foo")) ); - let mut parent_dir = current_dir.clone(); + let mut parent_dir = native_current_dir.clone(); assert!(parent_dir.pop()); + assert_eq!(absolute_path("..", &current_dir).as_ref(), Ok(&parent_dir)); assert_eq!( - absolute_path(Path::new(".."), &current_dir).as_ref(), - &parent_dir - ); - assert_eq!( - absolute_path(Path::new("../foo"), &current_dir).as_ref(), - &parent_dir.join("foo") + absolute_path("../foo",
&current_dir).as_ref(), + Ok(&parent_dir.join("foo")) ); } @@ -539,7 +528,7 @@ mod tests { assert_eq!(channel.name(), "conda-forge"); assert_eq!(channel.platforms, None); - assert_eq!(channel, Channel::from_name("conda-forge/", None, &config)); + assert_eq!(channel, Channel::from_name("conda-forge/", &config)); } #[test] @@ -581,14 +570,12 @@ mod tests { let current_dir = std::env::current_dir().expect("no current dir?"); let channel = Channel::from_str("./dir/does/not_exist", &config).unwrap(); assert_eq!(channel.name.as_deref(), Some("./dir/does/not_exist")); + let expected = absolute_path("./dir/does/not_exist", &current_dir).unwrap(); assert_eq!( channel.name(), - Url::from_directory_path(absolute_path( - Path::new("./dir/does/not_exist"), - &current_dir - )) - .unwrap() - .as_str() + file_url::directory_path_to_url(expected.to_path()) + .unwrap() + .as_str() ); assert_eq!(channel.platforms, None); assert_eq!( @@ -634,7 +621,7 @@ mod tests { Url::from_str("https://conda.anaconda.org/conda-forge/").unwrap() ); assert_eq!(channel.name.as_deref(), Some("conda-forge")); - assert_eq!(channel.platforms, Some(smallvec![platform])); + assert_eq!(channel.platforms, Some(vec![platform])); let channel = Channel::from_str( format!("https://conda.anaconda.org/pkgs/main[{platform}]"), @@ -646,7 +633,7 @@ mod tests { Url::from_str("https://conda.anaconda.org/pkgs/main/").unwrap() ); assert_eq!(channel.name.as_deref(), Some("pkgs/main")); - assert_eq!(channel.platforms, Some(smallvec![platform])); + assert_eq!(channel.platforms, Some(vec![platform])); let channel = Channel::from_str("conda-forge/label/rust_dev", &config).unwrap(); assert_eq!( diff --git a/crates/rattler_conda_types/src/lib.rs b/crates/rattler_conda_types/src/lib.rs index 1c9b74cf7..4d1a83911 100644 --- a/crates/rattler_conda_types/src/lib.rs +++ b/crates/rattler_conda_types/src/lib.rs @@ -39,6 +39,7 @@ pub use parse_mode::ParseStrictness; pub use platform::{Arch, ParseArchError, ParsePlatformError, Platform}; pub use prefix_record::PrefixRecord; pub use repo_data::patches::{PackageRecordPatch, PatchInstructions, RepoDataPatch}; +pub use repo_data::sharded::{Shard, ShardedRepodata, ShardedSubdirInfo}; pub use repo_data::{ compute_package_url, ChannelInfo, ConvertSubdirError, PackageRecord, RepoData, }; diff --git a/crates/rattler_conda_types/src/match_spec/mod.rs b/crates/rattler_conda_types/src/match_spec/mod.rs index 1b3dc8e44..774a6dcd2 100644 --- a/crates/rattler_conda_types/src/match_spec/mod.rs +++ b/crates/rattler_conda_types/src/match_spec/mod.rs @@ -253,6 +253,16 @@ impl MatchSpec { } } +// Enable constructing a match spec from a package name. +impl From<PackageName> for MatchSpec { + fn from(value: PackageName) -> Self { + Self { + name: Some(value), + ..Default::default() + } + } +} + /// Similar to a [`MatchSpec`] but does not include the package name. This is useful in places /// where the package name is already known (e.g.
`foo = "3.4.1 *cuda"`) #[serde_as] diff --git a/crates/rattler_conda_types/src/package/has_prefix.rs b/crates/rattler_conda_types/src/package/has_prefix.rs index a5a678c7c..e4d37c1ca 100644 --- a/crates/rattler_conda_types/src/package/has_prefix.rs +++ b/crates/rattler_conda_types/src/package/has_prefix.rs @@ -79,7 +79,7 @@ impl FromStr for HasPrefixEntry { |(prefix, _, file_mode, _, path)| HasPrefixEntry { prefix: Cow::Owned(prefix.into_owned()), file_mode, - relative_path: PathBuf::from(path.as_ref()), + relative_path: PathBuf::from(&*path), }, ))(buf) } @@ -89,7 +89,7 @@ impl FromStr for HasPrefixEntry { all_consuming(map(possibly_quoted_string, |path| HasPrefixEntry { prefix: Cow::Borrowed(placeholder_string()), file_mode: FileMode::Text, - relative_path: PathBuf::from(path.as_ref()), + relative_path: PathBuf::from(&*path), }))(buf) } diff --git a/crates/rattler_conda_types/src/package/paths.rs b/crates/rattler_conda_types/src/package/paths.rs index 0ec36d038..d639c8e6b 100644 --- a/crates/rattler_conda_types/src/package/paths.rs +++ b/crates/rattler_conda_types/src/package/paths.rs @@ -116,7 +116,7 @@ impl PathsJson { path_type, prefix_placeholder: prefix.map(|entry| PrefixPlaceholder { file_mode: entry.file_mode, - placeholder: entry.prefix.as_ref().to_owned(), + placeholder: (*entry.prefix).to_owned(), }), no_link: no_link.contains(&path), sha256: None, diff --git a/crates/rattler_conda_types/src/repo_data/mod.rs b/crates/rattler_conda_types/src/repo_data/mod.rs index 46959f451..58fef5021 100644 --- a/crates/rattler_conda_types/src/repo_data/mod.rs +++ b/crates/rattler_conda_types/src/repo_data/mod.rs @@ -2,6 +2,7 @@ //! of a channel. It provides indexing functionality. pub mod patches; +pub mod sharded; mod topological_sort; use std::borrow::Cow; diff --git a/crates/rattler_conda_types/src/repo_data/patches.rs b/crates/rattler_conda_types/src/repo_data/patches.rs index 0839924da..4eb6f4e76 100644 --- a/crates/rattler_conda_types/src/repo_data/patches.rs +++ b/crates/rattler_conda_types/src/repo_data/patches.rs @@ -6,7 +6,7 @@ use serde_with::{serde_as, skip_serializing_none, OneOrMany}; use std::io; use std::path::Path; -use crate::{package::ArchiveType, PackageRecord, PackageUrl, RepoData}; +use crate::{package::ArchiveType, PackageRecord, PackageUrl, RepoData, Shard}; /// Represents a Conda repodata patch. 
/// @@ -148,56 +148,82 @@ impl PackageRecord { } } -impl RepoData { - /// Apply a patch to a repodata file - /// Note that we currently do not handle `revoked` instructions - pub fn apply_patches(&mut self, instructions: &PatchInstructions) { - for (pkg, patch) in instructions.packages.iter() { - if let Some(record) = self.packages.get_mut(pkg) { - record.apply_patch(patch); - } - - // also apply the patch to the conda packages - if let Some((pkg_name, archive_type)) = ArchiveType::split_str(pkg) { - assert!(archive_type == ArchiveType::TarBz2); - if let Some(record) = self.conda_packages.get_mut(&format!("{pkg_name}.conda")) { - record.apply_patch(patch); - } - } +/// Apply a patch to a repodata file +/// Note that we currently do not handle `revoked` instructions +pub fn apply_patches_impl( + packages: &mut FxHashMap<String, PackageRecord>, + conda_packages: &mut FxHashMap<String, PackageRecord>, + removed: &mut FxHashSet<String>, + instructions: &PatchInstructions, +) { + for (pkg, patch) in instructions.packages.iter() { + if let Some(record) = packages.get_mut(pkg) { + record.apply_patch(patch); } - for (pkg, patch) in instructions.conda_packages.iter() { - if let Some(record) = self.conda_packages.get_mut(pkg) { + // also apply the patch to the conda packages + if let Some((pkg_name, archive_type)) = ArchiveType::split_str(pkg) { + assert!(archive_type == ArchiveType::TarBz2); + if let Some(record) = conda_packages.get_mut(&format!("{pkg_name}.conda")) { record.apply_patch(patch); } } + } - let mut removed = FxHashSet::<String>::default(); - // remove packages that have been removed - for pkg in instructions.remove.iter() { - if let Some((pkg_name, archive_type)) = ArchiveType::split_str(pkg) { - match archive_type { - ArchiveType::TarBz2 => { - if self.packages.remove_entry(pkg).is_some() { - removed.insert(pkg.clone()); - } - - // also remove equivalent .conda package if it exists - let conda_pkg_name = format!("{pkg_name}.conda"); - if self.conda_packages.remove_entry(&conda_pkg_name).is_some() { - removed.insert(conda_pkg_name); - } + for (pkg, patch) in instructions.conda_packages.iter() { + if let Some(record) = conda_packages.get_mut(pkg) { + record.apply_patch(patch); + } + } + + // remove packages that have been removed + for pkg in instructions.remove.iter() { + if let Some((pkg_name, archive_type)) = ArchiveType::split_str(pkg) { + match archive_type { + ArchiveType::TarBz2 => { + if packages.remove_entry(pkg).is_some() { + removed.insert(pkg.clone()); } - ArchiveType::Conda => { - if self.conda_packages.remove_entry(pkg).is_some() { - removed.insert(pkg.clone()); - } + + // also remove equivalent .conda package if it exists + let conda_pkg_name = format!("{pkg_name}.conda"); + if conda_packages.remove_entry(&conda_pkg_name).is_some() { + removed.insert(conda_pkg_name); + } + } + ArchiveType::Conda => { + if conda_packages.remove_entry(pkg).is_some() { + removed.insert(pkg.clone()); } } } } + } +} - self.removed.extend(removed); +impl RepoData { + /// Apply a patch to a repodata file + /// Note that we currently do not handle `revoked` instructions + pub fn apply_patches(&mut self, instructions: &PatchInstructions) { + apply_patches_impl( + &mut self.packages, + &mut self.conda_packages, + &mut self.removed, + instructions, + ); + } +} + +impl Shard { + /// Apply a patch to a shard + /// Note that we currently do not handle `revoked` instructions + pub fn apply_patches(&mut self, instructions: &PatchInstructions) { + apply_patches_impl( + &mut self.packages, + &mut self.conda_packages, + &mut self.removed, + instructions, + ); } }
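The refactor above extracts `apply_patches_impl` so that full repodata files and the per-package shards introduced in the next file share one patching code path. A minimal sketch of how a shard could be decoded and patched (assuming shards are zstd-compressed msgpack, inferred from the `repodata_shards.msgpack.zst` naming below and the new `rmp-serde` dependency; `decode_and_patch` is a hypothetical helper, not part of this diff):

    use rattler_conda_types::{PatchInstructions, Shard};

    /// Hypothetical helper: decode a single shard and apply repodata patches to it.
    fn decode_and_patch(bytes: &[u8], instructions: &PatchInstructions) -> anyhow::Result<Shard> {
        // Assumed wire format: a zstd-compressed msgpack payload.
        let decompressed = zstd::decode_all(bytes)?;
        let mut shard: Shard = rmp_serde::from_slice(&decompressed)?;
        // Same `revoked`-less patch semantics as `RepoData::apply_patches`.
        shard.apply_patches(instructions);
        Ok(shard)
    }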
diff --git a/crates/rattler_conda_types/src/repo_data/sharded.rs b/crates/rattler_conda_types/src/repo_data/sharded.rs new file mode 100644 index 000000000..8b0f77b2c --- /dev/null +++ b/crates/rattler_conda_types/src/repo_data/sharded.rs @@ -0,0 +1,43 @@ +//! Structs to deal with repodata "shards" which are per-package repodata files. +use fxhash::{FxHashMap, FxHashSet}; +use rattler_digest::Sha256Hash; +use serde::{Deserialize, Serialize}; +use url::Url; + +use crate::PackageRecord; + +/// The sharded repodata holds a hashmap of package name -> shard (hash). +/// This index file is stored under `<channel>/<subdir>/repodata_shards.msgpack.zst` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ShardedRepodata { + /// Additional information about the sharded subdirectory such as the base url. + pub info: ShardedSubdirInfo, + /// The individual shards indexed by package name. + pub shards: FxHashMap<String, Sha256Hash>, +} + +/// Information about a sharded subdirectory that is stored inside the index file. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ShardedSubdirInfo { + /// The name of the subdirectory + pub subdir: String, + + /// The base url of the subdirectory. This is the location where the actual + /// packages are stored. + pub base_url: Url, +} + +/// An individual shard that contains repodata for a single package name. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Shard { + /// The records for all `.tar.bz2` packages + pub packages: FxHashMap<String, PackageRecord>, + + /// The records for all `.conda` packages + #[serde(rename = "packages.conda", default)] + pub conda_packages: FxHashMap<String, PackageRecord>, + + /// The file names of all removed packages for this shard + #[serde(default)] + pub removed: FxHashSet<String>, +} diff --git a/crates/rattler_conda_types/src/version/mod.rs b/crates/rattler_conda_types/src/version/mod.rs index fb66e2b0a..d8586dbfd 100644 --- a/crates/rattler_conda_types/src/version/mod.rs +++ b/crates/rattler_conda_types/src/version/mod.rs @@ -951,7 +951,6 @@ impl<'de> Deserialize<'de> for Version { D: Deserializer<'de>, { Cow::<'de, str>::deserialize(deserializer)?
- .as_ref() .parse() .map_err(D::Error::custom) } } diff --git a/crates/rattler_conda_types/src/version/with_source.rs b/crates/rattler_conda_types/src/version/with_source.rs index f603aef7a..ac83b78ba 100644 --- a/crates/rattler_conda_types/src/version/with_source.rs +++ b/crates/rattler_conda_types/src/version/with_source.rs @@ -65,7 +65,7 @@ impl Ord for VersionWithSource { // First order by version then by string representation self.version .cmp(&other.version) - .then_with(|| self.as_str().as_ref().cmp(other.as_str().as_ref())) + .then_with(|| self.as_str().cmp(&other.as_str())) } } diff --git a/crates/rattler_digest/Cargo.toml b/crates/rattler_digest/Cargo.toml index 1a1b9763a..427f11fd1 100644 --- a/crates/rattler_digest/Cargo.toml +++ b/crates/rattler_digest/Cargo.toml @@ -19,10 +19,11 @@ serde = { workspace = true, features = ["derive"], optional = true } serde_with = { workspace = true } sha2 = { workspace = true } tokio = { workspace = true, features = ["io-util"], optional = true } +generic-array = { workspace = true } [features] tokio = ["dep:tokio"] -serde = ["dep:serde"] +serde = ["dep:serde", "generic-array/serde"] [dev-dependencies] rstest = { workspace = true } diff --git a/crates/rattler_digest/src/serde.rs b/crates/rattler_digest/src/serde.rs index 779451122..37dfb92db 100644 --- a/crates/rattler_digest/src/serde.rs +++ b/crates/rattler_digest/src/serde.rs @@ -22,17 +22,27 @@ use std::borrow::Cow; use std::fmt::LowerHex; use std::ops::Deref; -/// Deserialize into [`Output`] of a [`Digest`] +/// Deserialize the [`Output`] of a [`Digest`]. +/// +/// If the deserializer is human-readable, it will parse the digest from a hex +/// string. Otherwise, it will deserialize raw bytes. pub fn deserialize<'de, D, Dig: Digest>(deserializer: D) -> Result<Output<Dig>, D::Error> where D: Deserializer<'de>, { - let str = Cow::<'de, str>::deserialize(deserializer)?; - super::parse_digest_from_hex::<Dig>(str.as_ref()) - .ok_or_else(|| Error::custom("failed to parse digest")) + if deserializer.is_human_readable() { + let str = Cow::<'de, str>::deserialize(deserializer)?; + super::parse_digest_from_hex::<Dig>(str.as_ref()) + .ok_or_else(|| Error::custom("failed to parse digest")) + } else { + Output::<Dig>::deserialize(deserializer) + } } -/// Serialize into a string +/// Serializes the [`Output`] of a [`Digest`]. +/// +/// If the serializer is human-readable, it will write the digest as a hex +/// string. Otherwise, it will serialize raw bytes.
pub fn serialize<'a, S: Serializer, Dig: Digest>( digest: &'a Output<Dig>, s: S, @@ -40,7 +50,11 @@ pub fn serialize<'a, S: Serializer, Dig: Digest>( where &'a Output<Dig>: LowerHex, { - format!("{digest:x}").serialize(s) + if s.is_human_readable() { + format!("{digest:x}").serialize(s) + } else { + digest.serialize(s) + } } /// Wrapper type for easily serializing a Hash diff --git a/crates/rattler_lock/Cargo.toml b/crates/rattler_lock/Cargo.toml index e766828a7..fdc690e04 100644 --- a/crates/rattler_lock/Cargo.toml +++ b/crates/rattler_lock/Cargo.toml @@ -15,8 +15,9 @@ chrono = { workspace = true } fxhash = { workspace = true } indexmap = { workspace = true, features = ["serde"] } itertools = { workspace = true } -rattler_conda_types = { path="../rattler_conda_types", version = "0.22.1", default-features = false } -rattler_digest = { path="../rattler_digest", version = "0.19.3", default-features = false } +rattler_conda_types = { path = "../rattler_conda_types", version = "0.22.1", default-features = false } +rattler_digest = { path = "../rattler_digest", version = "0.19.3", default-features = false } +file_url = { path = "../file_url", version = "0.1.0" } pep508_rs = { workspace = true, features = ["serde"] } pep440_rs = { workspace = true, features = ["serde"] } serde = { workspace = true, features = ["derive"] } @@ -27,8 +28,6 @@ serde_repr = { workspace = true } thiserror = { workspace = true } url = { workspace = true, features = ["serde"] } purl = { workspace = true, features = ["serde"] } -percent-encoding = { workspace = true } - [dev-dependencies] insta = { workspace = true, features = ["yaml"] } similar-asserts = { workspace = true } diff --git a/crates/rattler_lock/src/url_or_path.rs b/crates/rattler_lock/src/url_or_path.rs index fb16a5606..d4350879a 100644 --- a/crates/rattler_lock/src/url_or_path.rs +++ b/crates/rattler_lock/src/url_or_path.rs @@ -1,5 +1,5 @@ +use file_url::url_to_path; use itertools::Itertools; -use percent_encoding::percent_decode; use serde_with::{DeserializeFromStr, SerializeDisplay}; use std::borrow::Cow; use std::hash::Hash; @@ -10,7 +10,7 @@ use std::{ str::FromStr, }; use thiserror::Error; -use url::{Host, Url}; +use url::Url; /// Represents either a URL or a path. /// @@ -27,73 +27,6 @@ pub enum UrlOrPath { Path(PathBuf), } -/// Returns true if the specified segment is considered to be a Windows drive letter segment. -/// E.g. the segment `C:` or `C%3A` would be considered a drive letter segment. -fn is_windows_drive_letter_segment(segment: &str) -> Option<String> { - // Segment is a simple drive letter: X: - if let Some((drive_letter, ':')) = segment.chars().collect_tuple() { - if drive_letter.is_ascii_alphabetic() { - return Some(format!("{drive_letter}:\\")); - } - } - - // Segment is a simple drive letter but the colon is percent escaped: E.g. X%3A - if let Some((drive_letter, '%', '3', 'a' | 'A')) = segment.chars().collect_tuple() { - if drive_letter.is_ascii_alphabetic() { - return Some(format!("{drive_letter}:\\")); - } - } - - None -} - -/// Tries to convert a `file://` based URL to a path. -/// -/// We assume that any passed URL that represents a path is an absolute path. -/// -/// [`Url::to_file_path`] has a different code path for Windows and other operating systems, this -/// can cause URLs to parse perfectly fine on Windows, but fail to parse on Linux. This function -/// tries to parse the URL as a path on all operating systems.
-fn url_to_path(url: &Url) -> Option { - if url.scheme() != "file" { - return None; - } - - let mut segments = url.path_segments()?; - let host = match url.host() { - None | Some(Host::Domain("localhost")) => None, - Some(host) => Some(host), - }; - - let (mut path, seperator) = if let Some(host) = host { - // A host is only present for Windows UNC paths - (format!("\\\\{host}\\"), "\\") - } else { - let first = segments.next()?; - if first.starts_with('.') { - // Relative file paths are not supported - return None; - } - - match is_windows_drive_letter_segment(first) { - Some(drive_letter) => (drive_letter, "\\"), - None => (format!("/{first}/"), "/"), - } - }; - - for (idx, segment) in segments.enumerate() { - if idx > 0 { - path.push_str(seperator); - } - match String::from_utf8(percent_decode(segment.as_bytes()).collect()) { - Ok(s) => path.push_str(&s), - _ => return None, - } - } - - Some(PathBuf::from(path)) -} - impl PartialEq for UrlOrPath { fn eq(&self, other: &Self) -> bool { match (self.canonicalize().as_ref(), other.canonicalize().as_ref()) { @@ -209,26 +142,6 @@ mod test { use super::*; use std::str::FromStr; - #[test] - fn test_url_to_path() { - let urls = [ - ("file:///home/bob/test-file.txt", "/home/bob/test-file.txt"), - ("file:///C:/Test/Foo.txt", "C:\\Test\\Foo.txt"), - ("file:///c:/temp/test-file.txt", "c:\\temp\\test-file.txt"), - ("file:///c:\\temp\\test-file.txt", "c:\\temp\\test-file.txt"), - // Percent encoding - ("file:///foo/ba%20r", "/foo/ba r"), - ("file:///C%3A/Test/Foo.txt", "C:\\Test\\Foo.txt"), - ]; - - for (url, path) in urls { - assert_eq!( - url_to_path(&Url::from_str(url).unwrap()).unwrap(), - PathBuf::from(path) - ); - } - } - #[test] fn test_equality() { let tests = [ diff --git a/crates/rattler_networking/src/authentication_middleware.rs b/crates/rattler_networking/src/authentication_middleware.rs index 3921bceb1..0f24159da 100644 --- a/crates/rattler_networking/src/authentication_middleware.rs +++ b/crates/rattler_networking/src/authentication_middleware.rs @@ -8,6 +8,7 @@ use reqwest_middleware::{Middleware, Next}; use std::path::{Path, PathBuf}; use std::sync::OnceLock; use url::Url; + /// `reqwest` middleware to authenticate requests #[derive(Clone, Default)] pub struct AuthenticationMiddleware { @@ -22,8 +23,12 @@ impl Middleware for AuthenticationMiddleware { extensions: &mut http::Extensions, next: Next<'_>, ) -> reqwest_middleware::Result { - let url = req.url().clone(); + // If an `Authorization` header is already present, don't authenticate + if req.headers().get(reqwest::header::AUTHORIZATION).is_some() { + return next.run(req, extensions).await; + } + let url = req.url().clone(); match self.auth_storage.get_by_url(url) { Err(_) => { // Forward error to caller (invalid URL) diff --git a/crates/rattler_package_streaming/tests/extract.rs b/crates/rattler_package_streaming/tests/extract.rs index 6257a5c25..5887bc66d 100644 --- a/crates/rattler_package_streaming/tests/extract.rs +++ b/crates/rattler_package_streaming/tests/extract.rs @@ -128,7 +128,7 @@ fn test_stream_info(#[case] input: &str, #[case] _sha256: &str, #[case] _md5: &s let target_dir = temp_dir.join(format!( "{}-info", - file_path.file_stem().unwrap().to_string_lossy().as_ref() + &file_path.file_stem().unwrap().to_string_lossy() )); info_stream.unpack(target_dir).unwrap(); diff --git a/crates/rattler_package_streaming/tests/write.rs b/crates/rattler_package_streaming/tests/write.rs index b1b31860c..0edec1588 100644 --- a/crates/rattler_package_streaming/tests/write.rs +++ 
b/crates/rattler_package_streaming/tests/write.rs @@ -173,7 +173,7 @@ fn test_rewrite_tar_bz2() { let new_archive = temp_dir.join(format!( "{}-new.tar.bz2", - file_path.file_stem().unwrap().to_string_lossy().as_ref() + &file_path.file_stem().unwrap().to_string_lossy() )); let writer = File::create(&new_archive).unwrap(); @@ -213,7 +213,7 @@ fn test_rewrite_conda() { let new_archive = temp_dir.join(format!( "{}-new.conda", - file_path.file_stem().unwrap().to_string_lossy().as_ref() + &file_path.file_stem().unwrap().to_string_lossy() )); let writer = File::create(&new_archive).unwrap(); diff --git a/crates/rattler_repodata_gateway/Cargo.toml b/crates/rattler_repodata_gateway/Cargo.toml index 7ff6e3ea3..19a48f27d 100644 --- a/crates/rattler_repodata_gateway/Cargo.toml +++ b/crates/rattler_repodata_gateway/Cargo.toml @@ -11,36 +11,48 @@ license.workspace = true readme.workspace = true [dependencies] +anyhow = { workspace = true } async-compression = { workspace = true, features = ["gzip", "tokio", "bzip2", "zstd"] } +async-trait = { workspace = true, optional = true } blake2 = { workspace = true } -bytes = { workspace = true, optional = true } +bytes = { workspace = true } cache_control = { workspace = true } chrono = { workspace = true, features = ["std", "serde", "alloc", "clock"] } +dashmap = { workspace = true } +dirs = { workspace = true } +file_url = { path = "../file_url", version = "0.1.0" } +futures = { workspace = true } +fxhash = { workspace = true, optional = true } +hex = { workspace = true, features = ["serde"] } +http = { workspace = true, optional = true } +http-cache-semantics = { workspace = true, optional = true, features = ["reqwest", "serde"] } humansize = { workspace = true } humantime = { workspace = true } -futures = { workspace = true } -reqwest = { workspace = true, features = ["stream"] } +itertools = { workspace = true, optional = true } +json-patch = { workspace = true } +md-5 = { workspace = true } +memmap2 = { workspace = true, optional = true } +ouroboros = { workspace = true, optional = true } +parking_lot = { workspace = true, optional = true } +pin-project-lite = { workspace = true } +rattler_conda_types = { path = "../rattler_conda_types", version = "0.22.1", default-features = false, optional = true } +rattler_digest = { path = "../rattler_digest", version = "0.19.3", default-features = false, features = ["tokio", "serde"] } +rattler_networking = { path = "../rattler_networking", version = "0.20.5", default-features = false } +reqwest = { workspace = true, features = ["stream", "http2"] } reqwest-middleware = { workspace = true } -tokio-util = { workspace = true, features = ["codec", "io"] } -tempfile = { workspace = true } -tracing = { workspace = true } -thiserror = { workspace = true } -url = { workspace = true, features = ["serde"] } -tokio = { workspace = true, features = ["rt", "io-util"] } -anyhow = { workspace = true } +rmp-serde = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } -pin-project-lite = { workspace = true } -rattler_digest = { path="../rattler_digest", version = "0.19.3", default-features = false, features = ["tokio", "serde"] } -rattler_conda_types = { path="../rattler_conda_types", version = "0.22.1", default-features = false, optional = true } -memmap2 = { workspace = true, optional = true } -ouroboros = { workspace = true, optional = true } serde_with = { workspace = true } superslice = { workspace = true, optional = true } -itertools = { workspace = true, optional = true } 
-json-patch = { workspace = true } -hex = { workspace = true, features = ["serde"] } -rattler_networking = { path="../rattler_networking", version = "0.20.5", default-features = false } +tempfile = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["rt", "io-util", "macros"] } +tokio-util = { workspace = true, features = ["codec", "io"] } +tracing = { workspace = true } +url = { workspace = true, features = ["serde"] } +zstd = { workspace = true } +percent-encoding = { workspace = true } [target.'cfg(unix)'.dependencies] libc = { workspace = true } @@ -54,12 +66,14 @@ axum = { workspace = true, features = ["tokio"] } hex-literal = { workspace = true } insta = { workspace = true, features = ["yaml"] } rstest = { workspace = true } -tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } +tokio = { workspace = true, features = ["rt-multi-thread"] } tower-http = { workspace = true, features = ["fs", "compression-gzip", "trace"] } tracing-test = { workspace = true } +rattler_conda_types = { path = "../rattler_conda_types", version = "0.22.0", default-features = false } [features] default = ['native-tls'] -native-tls = ['reqwest/native-tls'] +native-tls = ['reqwest/native-tls', 'reqwest/native-tls-alpn'] rustls-tls = ['reqwest/rustls-tls'] -sparse = ["rattler_conda_types", "memmap2", "ouroboros", "superslice", "itertools", "serde_json/raw_value", "bytes"] +sparse = ["rattler_conda_types", "memmap2", "ouroboros", "superslice", "itertools", "serde_json/raw_value"] +gateway = ["sparse", "http", "http-cache-semantics", "parking_lot", "async-trait"] diff --git a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs index d003f5c6d..8e7cc3620 100644 --- a/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/jlap/mod.rs @@ -70,7 +70,8 @@ //! &client, //! subdir_url, //! repo_data_state, -//! ¤t_repo_data +//! ¤t_repo_data, +//! None //! ).await.unwrap(); //! //! // Now we can use the `updated_jlap_state` object to update our `.info.json` file @@ -90,8 +91,8 @@ use reqwest::{ }; use reqwest_middleware::ClientWithMiddleware; use serde::{Deserialize, Serialize}; +use serde_json::Value; use serde_with::serde_as; -use std::collections::{BTreeMap, HashMap}; use std::io::Write; use std::iter::Iterator; use std::path::Path; @@ -99,10 +100,12 @@ use std::str; use std::str::FromStr; use std::sync::Arc; use tempfile::NamedTempFile; -use tokio::task::JoinError; use url::Url; pub use crate::fetch::cache::{JLAPFooter, JLAPState, RepoDataState}; +use crate::reporter::ResponseReporterExt; +use crate::utils::{run_blocking_task, Cancelled}; +use crate::Reporter; /// File suffix for JLAP file pub const JLAP_FILE_SUFFIX: &str = "jlap"; @@ -164,6 +167,12 @@ pub enum JLAPError { Cancelled, } +impl From for JLAPError { + fn from(_: Cancelled) -> Self { + JLAPError::Cancelled + } +} + impl From for JLAPError { fn from(value: reqwest_middleware::Error) -> Self { Self::HTTP(value.redact()) @@ -304,6 +313,7 @@ impl<'a> JLAPResponse<'a> { &self, repo_data_json_path: &Path, hash: Output, + reporter: Option>, ) -> Result { // We use the current hash to find which patches we need to apply let current_idx = self.patches.iter().position(|patch| patch.from == hash); @@ -313,18 +323,12 @@ impl<'a> JLAPResponse<'a> { // Apply the patches on a blocking thread. Applying the patches is a relatively CPU intense // operation and we don't want to block the tokio runtime. 
let patches = self.patches.clone(); let repo_data_json_path = repo_data_json_path.to_path_buf(); - match tokio::task::spawn_blocking(move || { - apply_jlap_patches(patches, idx, &repo_data_json_path) + run_blocking_task(move || { + apply_jlap_patches(patches, idx, &repo_data_json_path, reporter) }) .await - .map_err(JoinError::try_into_panic) - { - Ok(hash) => hash, - Err(Ok(reason)) => std::panic::resume_unwind(reason), - Err(_) => Err(JLAPError::Cancelled), - } } /// Returns a new [`JLAPState`] based on values in [`JLAPResponse`] struct @@ -410,6 +414,7 @@ pub async fn patch_repo_data( subdir_url: Url, repo_data_state: RepoDataState, repo_data_json_path: &Path, + reporter: Option<Arc<dyn Reporter>>, ) -> Result<(JLAPState, Blake2b256Hash), JLAPError> { // Determine what we should use as our starting state let mut jlap_state = get_jlap_state(repo_data_state.jlap); .join(JLAP_FILE_NAME) .expect("Valid URLs should always be join-able with this constant value"); + let download_report = reporter + .as_deref() + .map(|reporter| (reporter, reporter.on_download_start(&jlap_url))); let (response, position) = - fetch_jlap_with_retry(jlap_url.as_str(), client, jlap_state.position).await?; - let response_text = match response.text().await { + fetch_jlap_with_retry(&jlap_url, client, jlap_state.position).await?; + let jlap_response_url = response.url().clone(); + let response_text = match response.text_with_progress(download_report).await { Ok(value) => value, Err(error) => return Err(error.into()), }; + if let Some((reporter, index)) = download_report { + reporter.on_download_complete(&jlap_response_url, index); + } // Update position as it may have changed jlap_state.position = position; @@ -445,7 +457,7 @@ pub async fn patch_repo_data( } // Applies patches and returns early if an error is encountered - let hash = jlap.apply(repo_data_json_path, hash).await?; + let hash = jlap.apply(repo_data_json_path, hash, reporter).await?; // Patches were applied successfully, so we need to update the position Ok((jlap.get_state(jlap.new_position, new_iv), hash)) @@ -453,11 +465,11 @@ /// Fetches a JLAP response from server async fn fetch_jlap( - url: &str, + url: &Url, client: &ClientWithMiddleware, range: &str, ) -> reqwest_middleware::Result<Response> { - let request_builder = client.get(url); + let request_builder = client.get(url.clone()); let mut headers = HeaderMap::default(); headers.insert( @@ -477,7 +489,7 @@ async fn fetch_jlap( /// We return a new value for position if this was triggered so that we can update the /// `JLAPState` accordingly.
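The `run_blocking_task` helper used above is not itself shown in this diff; a plausible sketch of it, reconstructed from the removed `spawn_blocking` match arms and the new `From<Cancelled> for JLAPError` impl (the real helper lives in `crate::utils` and may differ in detail):

```rust
use tokio::task::JoinError;

/// Stand-in for `crate::utils::Cancelled` (assumed to be a unit marker type).
pub struct Cancelled;

/// Runs a CPU-heavy closure off the async runtime. Panics inside the closure
/// are resumed on the calling task; a cancelled blocking task is surfaced as
/// an error through `From<Cancelled>`.
pub async fn run_blocking_task<T, E, F>(f: F) -> Result<T, E>
where
    F: FnOnce() -> Result<T, E> + Send + 'static,
    T: Send + 'static,
    E: From<Cancelled> + Send + 'static,
{
    match tokio::task::spawn_blocking(f)
        .await
        .map_err(JoinError::try_into_panic)
    {
        Ok(result) => result,
        Err(Ok(panic_payload)) => std::panic::resume_unwind(panic_payload),
        Err(Err(_)) => Err(Cancelled.into()),
    }
}
```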
async fn fetch_jlap_with_retry( - url: &str, + url: &Url, client: &ClientWithMiddleware, position: u64, ) -> Result<(Response, u64), JLAPError> { @@ -502,40 +514,6 @@ async fn fetch_jlap_with_retry( } } -#[derive(Serialize, Deserialize, Default)] -struct OrderedRepoData { - info: Option>, - - #[serde(serialize_with = "ordered_map")] - packages: Option>>, - - #[serde(serialize_with = "ordered_map", rename = "packages.conda")] - packages_conda: Option>>, - - removed: Option>, - - repodata_version: Option, -} - -fn ordered_map( - value: &Option>>, - serializer: S, -) -> Result -where - S: serde::Serializer, -{ - match value { - Some(value) => { - let ordered: BTreeMap<_, _> = value - .iter() - .map(|(key, packages)| (key, packages.iter().collect::>())) - .collect(); - ordered.serialize(serializer) - } - None => serializer.serialize_none(), - } -} - /// Applies JLAP patches to a `repodata.json` file /// /// This is a multi-step process that involves: @@ -548,14 +526,28 @@ fn apply_jlap_patches( patches: Arc<[Patch]>, start_index: usize, repo_data_path: &Path, + reporter: Option>, ) -> Result { + let report = reporter + .as_deref() + .map(|reporter| (reporter, reporter.on_jlap_start())); + + if let Some((reporter, index)) = report { + reporter.on_jlap_decode_start(index); + } + // Read the contents of the current repodata to a string let repo_data_contents = std::fs::read_to_string(repo_data_path).map_err(JLAPError::FileSystem)?; // Parse the JSON so we can manipulate it tracing::info!("parsing cached repodata.json as JSON"); - let mut doc = serde_json::from_str(&repo_data_contents).map_err(JLAPError::JSONParse)?; + let mut repo_data = + serde_json::from_str::(&repo_data_contents).map_err(JLAPError::JSONParse)?; + + if let Some((reporter, index)) = report { + reporter.on_jlap_decode_completed(index); + } // Apply any patches that we have not already applied tracing::info!( @@ -563,21 +555,22 @@ fn apply_jlap_patches( start_index + 1, patches.len() ); - for patch in patches[start_index..].iter() { - if let Err(error) = json_patch::patch_unsafe(&mut doc, &patch.patch) { + for (patch_index, patch) in patches[start_index..].iter().enumerate() { + if let Some((reporter, index)) = report { + reporter.on_jlap_apply_patch(index, patch_index, patches.len()); + } + if let Err(error) = json_patch::patch_unsafe(&mut repo_data, &patch.patch) { return Err(JLAPError::JSONPatch(error)); } } - // Order the json - tracing::info!("converting patched JSON back to repodata"); - let ordered_doc: OrderedRepoData = serde_json::from_value(doc).map_err(JLAPError::JSONParse)?; + if let Some((reporter, index)) = report { + reporter.on_jlap_apply_patches_completed(index); + reporter.on_jlap_encode_start(index); + } // Convert the json to bytes, but we don't really care about formatting. - let mut updated_json = serde_json::to_string(&ordered_doc).map_err(JLAPError::JSONParse)?; - - // We need to add an extra newline character to the end of our string so the hashes match - updated_json.insert(updated_json.len(), '\n'); + let updated_json = serde_json::to_string(&repo_data).map_err(JLAPError::JSONParse)?; // Write the content to disk and immediately compute the hash of the file contents. 
tracing::info!("writing patched repodata to disk"); @@ -595,6 +588,11 @@ fn apply_jlap_patches( file.persist(repo_data_path) .map_err(|e| JLAPError::FileSystem(e.error))?; + if let Some((reporter, index)) = report { + reporter.on_jlap_encode_completed(index); + reporter.on_jlap_completed(index); + } + Ok(hash) } @@ -779,12 +777,11 @@ mod test { }, "removed": [], "repodata_version": 1 -} -"#; +}"#; - const FAKE_REPO_DATA_UPDATE_ONE: &str = "{\"info\":{\"subdir\":\"osx-64\"},\"packages\":{},\"packages.conda\":{\"zstd-1.5.4-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"f284fea068c51b1a0eaea3ac58c300c0\",\"name\":\"zstd\",\"sha256\":\"0af4513ef7ad7fa8854fa714130c25079f3744471fc106f47df80eb10c34429d\",\"size\":605550,\"subdir\":\"osx-64\",\"timestamp\":1680034665911,\"version\":\"1.5.4\"},\"zstd-1.5.5-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"5e0b7ddb1b7dc6b630e1f9a03499c19c\",\"name\":\"zstd\",\"sha256\":\"5b192501744907b841de036bb89f5a2776b4cac5795ccc25dcaebeac784db038\",\"size\":622467,\"subdir\":\"osx-64\",\"timestamp\":1681304595869,\"version\":\"1.5.5\"}},\"removed\":[],\"repodata_version\":1}\n"; + const FAKE_REPO_DATA_UPDATE_ONE: &str = "{\"info\":{\"subdir\":\"osx-64\"},\"packages\":{},\"packages.conda\":{\"zstd-1.5.4-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"f284fea068c51b1a0eaea3ac58c300c0\",\"name\":\"zstd\",\"sha256\":\"0af4513ef7ad7fa8854fa714130c25079f3744471fc106f47df80eb10c34429d\",\"size\":605550,\"subdir\":\"osx-64\",\"timestamp\":1680034665911,\"version\":\"1.5.4\"},\"zstd-1.5.5-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"5e0b7ddb1b7dc6b630e1f9a03499c19c\",\"name\":\"zstd\",\"sha256\":\"5b192501744907b841de036bb89f5a2776b4cac5795ccc25dcaebeac784db038\",\"size\":622467,\"subdir\":\"osx-64\",\"timestamp\":1681304595869,\"version\":\"1.5.5\"}},\"removed\":[],\"repodata_version\":1}"; - const FAKE_REPO_DATA_UPDATE_TWO: &str = "{\"info\":{\"subdir\":\"osx-64\"},\"packages\":{},\"packages.conda\":{\"zstd-1.5.4-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"f284fea068c51b1a0eaea3ac58c300c0\",\"name\":\"zstd\",\"sha256\":\"0af4513ef7ad7fa8854fa714130c25079f3744471fc106f47df80eb10c34429d\",\"size\":605550,\"subdir\":\"osx-64\",\"timestamp\":1680034665911,\"version\":\"1.5.4\"},\"zstd-1.5.5-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND 
GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"5e0b7ddb1b7dc6b630e1f9a03499c19c\",\"name\":\"zstd\",\"sha256\":\"5b192501744907b841de036bb89f5a2776b4cac5795ccc25dcaebeac784db038\",\"size\":622467,\"subdir\":\"osx-64\",\"timestamp\":1681304595869,\"version\":\"1.5.5\"},\"zstd-static-1.4.5-hb1e8313_0.conda\":{\"build\":\"hb1e8313_0\",\"build_number\":0,\"depends\":[\"libcxx >=10.0.0\",\"zstd 1.4.5 h41d2c2f_0\"],\"license\":\"BSD 3-Clause\",\"md5\":\"5447986040e0b73d6c681a4d8f615d6c\",\"name\":\"zstd-static\",\"sha256\":\"3759ab53ff8320d35c6db00d34059ba99058eeec1cbdd0da968c5e12f73f7658\",\"size\":13930,\"subdir\":\"osx-64\",\"timestamp\":1595965109852,\"version\":\"1.4.5\"}},\"removed\":[],\"repodata_version\":1}\n"; + const FAKE_REPO_DATA_UPDATE_TWO: &str = "{\"info\":{\"subdir\":\"osx-64\"},\"packages\":{},\"packages.conda\":{\"zstd-1.5.4-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"f284fea068c51b1a0eaea3ac58c300c0\",\"name\":\"zstd\",\"sha256\":\"0af4513ef7ad7fa8854fa714130c25079f3744471fc106f47df80eb10c34429d\",\"size\":605550,\"subdir\":\"osx-64\",\"timestamp\":1680034665911,\"version\":\"1.5.4\"},\"zstd-1.5.5-hc035e20_0.conda\":{\"build\":\"hc035e20_0\",\"build_number\":0,\"depends\":[\"libcxx >=14.0.6\",\"lz4-c >=1.9.4,<1.10.0a0\",\"xz >=5.2.10,<6.0a0\",\"zlib >=1.2.13,<1.3.0a0\"],\"license\":\"BSD-3-Clause AND GPL-2.0-or-later\",\"license_family\":\"BSD\",\"md5\":\"5e0b7ddb1b7dc6b630e1f9a03499c19c\",\"name\":\"zstd\",\"sha256\":\"5b192501744907b841de036bb89f5a2776b4cac5795ccc25dcaebeac784db038\",\"size\":622467,\"subdir\":\"osx-64\",\"timestamp\":1681304595869,\"version\":\"1.5.5\"},\"zstd-static-1.4.5-hb1e8313_0.conda\":{\"build\":\"hb1e8313_0\",\"build_number\":0,\"depends\":[\"libcxx >=10.0.0\",\"zstd 1.4.5 h41d2c2f_0\"],\"license\":\"BSD 3-Clause\",\"md5\":\"5447986040e0b73d6c681a4d8f615d6c\",\"name\":\"zstd-static\",\"sha256\":\"3759ab53ff8320d35c6db00d34059ba99058eeec1cbdd0da968c5e12f73f7658\",\"size\":13930,\"subdir\":\"osx-64\",\"timestamp\":1595965109852,\"version\":\"1.4.5\"}},\"removed\":[],\"repodata_version\":1}"; const FAKE_REPO_DATA_UPDATE_ONE_HASH: &str = "9b76165ba998f77b2f50342006192bf28817dad474d78d760ab12cc0260e3ed9"; @@ -951,6 +948,7 @@ mod test { test_env.server_url, test_env.repo_data_state, &test_env.cache_repo_data, + None, ) .await .unwrap(); diff --git a/crates/rattler_repodata_gateway/src/fetch/mod.rs b/crates/rattler_repodata_gateway/src/fetch/mod.rs index b7b371691..6873e1e15 100644 --- a/crates/rattler_repodata_gateway/src/fetch/mod.rs +++ b/crates/rattler_repodata_gateway/src/fetch/mod.rs @@ -1,6 +1,8 @@ //! This module provides functionality to download and cache `repodata.json` from a remote location. 
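For orientation before the implementation details, a sketch of calling this module's updated entry point with its new optional reporter argument (the URL and cache path are illustrative, and `anyhow` is used only to keep the example short):

```rust
use rattler_repodata_gateway::fetch::{fetch_repo_data, FetchRepoDataOptions};
use reqwest_middleware::ClientWithMiddleware;
use url::Url;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let cached = fetch_repo_data(
        Url::parse("https://conda.anaconda.org/conda-forge/linux-64/")?,
        ClientWithMiddleware::from(reqwest::Client::new()),
        "/tmp/rattler-cache".into(),
        FetchRepoDataOptions::default(),
        None, // pass `Some(Arc::new(...))` to receive progress callbacks
    )
    .await?;
    println!("cached repodata at {}", cached.repo_data_json_path.display());
    Ok(())
}
```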
+use crate::reporter::ResponseReporterExt; use crate::utils::{AsyncEncoding, Encoding, LockedFile}; +use crate::Reporter; use cache::{CacheHeaders, Expiring, RepoDataState}; use cache_control::{Cachability, CacheControl}; use futures::{future::ready, FutureExt, TryStreamExt}; @@ -11,6 +13,7 @@ use reqwest::{ header::{HeaderMap, HeaderValue}, Response, StatusCode, }; +use std::sync::Arc; use std::{ io::ErrorKind, path::{Path, PathBuf}, @@ -24,19 +27,16 @@ use url::Url; mod cache; pub mod jlap; -/// Type alias for function to report progress while downloading repodata -pub type ProgressFunc = Box; - /// `RepoData` could not be found for given channel and platform #[derive(Debug, thiserror::Error)] pub enum RepoDataNotFoundError { /// There was an error on the Http request #[error(transparent)] - HttpError(reqwest_middleware::Error), + HttpError(reqwest::Error), /// There was a file system error #[error(transparent)] - FileSystemError(std::io::Error), + FileSystemError(#[from] std::io::Error), } #[allow(missing_docs)] @@ -88,15 +88,9 @@ impl From for FetchRepoDataError { } } -impl From for RepoDataNotFoundError { - fn from(err: reqwest_middleware::Error) -> Self { - Self::HttpError(err.redact()) - } -} - impl From for RepoDataNotFoundError { fn from(err: reqwest::Error) -> Self { - Self::HttpError(err.redact().into()) + Self::HttpError(err.redact()) } } @@ -197,17 +191,6 @@ impl Default for FetchRepoDataOptions { } } -/// A struct that provides information about download progress. -#[derive(Debug, Clone)] -pub struct DownloadProgress { - /// The number of bytes already downloaded - pub bytes: u64, - - /// The total number of bytes to download. Or `None` if this is not known. This can happen - /// if the server does not supply a `Content-Length` header. - pub total: Option, -} - /// The result of [`fetch_repo_data`]. 
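The `Reporter` trait that replaces the removed `ProgressFunc`/`DownloadProgress` pair is defined elsewhere in this PR; a minimal sketch of implementing it, where only `on_download_progress`'s signature is confirmed by the test at the bottom of this file and the remaining hooks are assumed to have no-op defaults:

```rust
use rattler_repodata_gateway::Reporter;
use url::Url;

struct LoggingReporter;

impl Reporter for LoggingReporter {
    // Override only the hook we care about; every other callback keeps its
    // (assumed) no-op default implementation.
    fn on_download_progress(
        &self,
        url: &Url,
        _index: usize,
        bytes_downloaded: usize,
        total_bytes: Option<usize>,
    ) {
        eprintln!("{url}: {bytes_downloaded}/{total_bytes:?} bytes");
    }
}
```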
#[derive(Debug)] pub struct CachedRepoData { @@ -321,7 +304,7 @@ pub async fn fetch_repo_data( client: reqwest_middleware::ClientWithMiddleware, cache_path: PathBuf, options: FetchRepoDataOptions, - progress: Option, + reporter: Option>, ) -> Result { let subdir_url = normalize_subdir_url(subdir_url); @@ -429,6 +412,7 @@ pub async fn fetch_repo_data( subdir_url.clone(), repo_data_state.clone(), &repo_data_json_path, + reporter.clone(), ) .await { @@ -505,6 +489,9 @@ pub async fn fetch_repo_data( cache_headers.add_to_request(&mut headers); } // Send the request and wait for a reply + let download_reporter = reporter + .as_deref() + .map(|r| (r, r.on_download_start(&repo_data_url))); let response = match request_builder.headers(headers).send().await { Ok(response) if response.status() == StatusCode::NOT_FOUND => { return Err(FetchRepoDataError::NotFound(RepoDataNotFoundError::from( @@ -551,6 +538,7 @@ pub async fn fetch_repo_data( let cache_headers = CacheHeaders::from(&response); // Stream the content to a temporary file + let response_url = response.url().clone(); let (temp_file, blake2_hash) = stream_and_decode_to_file( repo_data_url.clone(), response, @@ -562,10 +550,14 @@ pub async fn fetch_repo_data( Encoding::Passthrough }, &cache_path, - progress, + download_reporter, ) .await?; + if let Some((reporter, index)) = download_reporter { + reporter.on_download_complete(&response_url, index); + } + // Persist the file to its final destination let repo_data_destination_path = repo_data_json_path.clone(); let repo_data_json_metadata = tokio::task::spawn_blocking(move || { @@ -625,41 +617,20 @@ async fn stream_and_decode_to_file( response: Response, content_encoding: Encoding, temp_dir: &Path, - mut progress_func: Option, + reporter: Option<(&dyn Reporter, usize)>, ) -> Result<(NamedTempFile, blake2::digest::Output), FetchRepoDataError> { - // Determine the length of the response in bytes and notify the listener that a download is - // starting. The response may be compressed. Decompression happens below. - let content_size = response.content_length(); - if let Some(progress_func) = progress_func.as_mut() { - progress_func(DownloadProgress { - bytes: 0, - total: content_size, - }); - } - // Determine the encoding of the response let transfer_encoding = Encoding::from(&response); // Convert the response into a byte stream + let mut total_bytes = 0; let bytes_stream = response - .bytes_stream() + .byte_stream_with_progress(reporter) + .inspect_ok(|bytes| { + total_bytes += bytes.len(); + }) .map_err(|e| std::io::Error::new(ErrorKind::Other, e)); - // Listen in on the bytes as they come from the response. Progress is tracked here instead of - // after decoding because that doesnt properly represent the number of bytes that are being - // transferred over the network. - let mut total_bytes = 0; - let total_bytes_mut = &mut total_bytes; - let bytes_stream = bytes_stream.inspect_ok(move |bytes| { - *total_bytes_mut += bytes.len() as u64; - if let Some(progress_func) = progress_func.as_mut() { - progress_func(DownloadProgress { - bytes: *total_bytes_mut, - total: content_size, - }); - } - }); - // Create a new stream from the byte stream that decodes the bytes using the transfer encoding // on the fly. 
let decoded_byte_stream = StreamReader::new(bytes_stream).decode(transfer_encoding); @@ -1038,19 +1009,18 @@ fn validate_cached_state( #[cfg(test)] mod test { - use super::{ - fetch_repo_data, CacheResult, CachedRepoData, DownloadProgress, FetchRepoDataOptions, - }; + use super::{fetch_repo_data, CacheResult, CachedRepoData, FetchRepoDataOptions}; use crate::fetch::{FetchRepoDataError, RepoDataNotFoundError}; use crate::utils::simple_channel_server::SimpleChannelServer; use crate::utils::Encoding; + use crate::Reporter; use assert_matches::assert_matches; use hex_literal::hex; use rattler_networking::AuthenticationMiddleware; use reqwest::Client; use reqwest_middleware::ClientWithMiddleware; use std::path::Path; - use std::sync::atomic::{AtomicU64, Ordering}; + use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use tempfile::TempDir; use tokio::io::AsyncWriteExt; @@ -1405,12 +1375,27 @@ mod test { std::fs::write(subdir_path.path().join("repodata.json"), FAKE_REPO_DATA).unwrap(); let server = SimpleChannelServer::new(subdir_path.path()).await; - let last_download_progress = Arc::new(AtomicU64::new(0)); - let last_download_progress_captured = last_download_progress.clone(); - let download_progress = move |progress: DownloadProgress| { - last_download_progress_captured.store(progress.bytes, Ordering::SeqCst); - assert_eq!(progress.total, Some(1110)); - }; + struct BasicReporter { + last_download_progress: AtomicUsize, + } + + impl Reporter for BasicReporter { + fn on_download_progress( + &self, + _url: &Url, + _index: usize, + bytes_downloaded: usize, + total_bytes: Option, + ) { + self.last_download_progress + .store(bytes_downloaded, Ordering::SeqCst); + assert_eq!(total_bytes, Some(1110)); + } + } + + let reporter = Arc::new(BasicReporter { + last_download_progress: AtomicUsize::new(0), + }); // Download the data from the channel with an empty cache. let cache_dir = TempDir::new().unwrap(); @@ -1419,12 +1404,12 @@ mod test { ClientWithMiddleware::from(Client::new()), cache_dir.into_path(), FetchRepoDataOptions::default(), - Some(Box::new(download_progress)), + Some(reporter.clone()), ) .await .unwrap(); - assert_eq!(last_download_progress.load(Ordering::SeqCst), 1110); + assert_eq!(reporter.last_download_progress.load(Ordering::SeqCst), 1110); } #[tracing_test::traced_test] diff --git a/crates/rattler_repodata_gateway/src/gateway/barrier_cell.rs b/crates/rattler_repodata_gateway/src/gateway/barrier_cell.rs new file mode 100644 index 000000000..4a6b34fd5 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/barrier_cell.rs @@ -0,0 +1,131 @@ +use std::{ + cell::UnsafeCell, + mem::MaybeUninit, + sync::atomic::{AtomicU8, Ordering}, +}; +use thiserror::Error; +use tokio::sync::Notify; + +/// A synchronization primitive that can be used to wait for a value to become available. +/// +/// The [`BarrierCell`] is initially empty, requesters can wait for a value to become available +/// using the `wait` method. Once a value is available, the `set` method can be used to set the +/// value in the cell. The `set` method can only be called once. If the `set` method is called +/// multiple times, it will return an error. When `set` is called all waiters will be notified. 
+pub struct BarrierCell { + state: AtomicU8, + value: UnsafeCell>, + notify: Notify, +} + +impl Drop for BarrierCell { + fn drop(&mut self) { + if self.state.load(Ordering::Acquire) == BarrierCellState::Initialized as u8 { + unsafe { self.value.get_mut().assume_init_drop() } + } + } +} + +unsafe impl Sync for BarrierCell {} + +unsafe impl Send for BarrierCell {} + +#[repr(u8)] +enum BarrierCellState { + Uninitialized, + Initializing, + Initialized, +} + +impl Default for BarrierCell { + fn default() -> Self { + Self::new() + } +} + +#[derive(Debug, Clone, Error)] +pub enum SetError { + #[error("cannot assign a BarrierCell twice")] + AlreadySet, +} + +impl BarrierCell { + /// Constructs a new instance. + pub fn new() -> Self { + Self { + state: AtomicU8::new(BarrierCellState::Uninitialized as u8), + value: UnsafeCell::new(MaybeUninit::uninit()), + notify: Notify::new(), + } + } + + /// Wait for a value to become available in the cell + pub async fn wait(&self) -> &T { + let notified = self.notify.notified(); + if self.state.load(Ordering::Acquire) != BarrierCellState::Initialized as u8 { + notified.await; + } + unsafe { (*self.value.get()).assume_init_ref() } + } + + /// Set the value in the cell, if the cell was already initialized this will return an error. + pub fn set(&self, value: T) -> Result<(), SetError> { + let state = self + .state + .fetch_max(BarrierCellState::Initializing as u8, Ordering::SeqCst); + + // If the state is larger than started writing, then either there is an active writer or + // the cell has already been initialized. + if state == BarrierCellState::Initialized as u8 { + return Err(SetError::AlreadySet); + } else { + unsafe { *self.value.get() = MaybeUninit::new(value) }; + self.state + .store(BarrierCellState::Initialized as u8, Ordering::Release); + + self.notify.notify_waiters(); + } + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::BarrierCell; + use std::sync::Arc; + + /// Test that setting the barrier cell works, and we can wait on the value + #[tokio::test] + pub async fn test_barrier_cell() { + let barrier = Arc::new(BarrierCell::new()); + let barrier_clone = barrier.clone(); + + let handle = tokio::spawn(async move { + let value = barrier_clone.wait().await; + assert_eq!(*value, 42); + }); + + tokio::time::sleep(tokio::time::Duration::from_millis(10)).await; + barrier.set(42).unwrap(); + handle.await.unwrap(); + } + + /// Test that we cannot set the barrier cell twice + #[tokio::test] + pub async fn test_barrier_cell_set_twice() { + let barrier = Arc::new(BarrierCell::new()); + barrier.set(42).unwrap(); + assert!(barrier.set(42).is_err()); + } + + #[test] + pub fn test_drop() { + let barrier = BarrierCell::new(); + let arc = Arc::new(42); + barrier.set(arc.clone()).unwrap(); + assert_eq!(Arc::strong_count(&arc), 2); + drop(barrier); + assert_eq!(Arc::strong_count(&arc), 1); + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/builder.rs b/crates/rattler_repodata_gateway/src/gateway/builder.rs new file mode 100644 index 000000000..5211d49fd --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/builder.rs @@ -0,0 +1,101 @@ +use crate::gateway::GatewayInner; +use crate::{ChannelConfig, Gateway}; +use dashmap::DashMap; +use reqwest::Client; +use reqwest_middleware::ClientWithMiddleware; +use std::path::PathBuf; +use std::sync::Arc; + +/// A builder for constructing a [`Gateway`]. 
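A sketch of the chained API the builder below enables, behind the new `gateway` feature (the crate-root re-exports match the builder's own `use crate::{ChannelConfig, Gateway}` import above; the cache directory is illustrative):

```rust
use rattler_repodata_gateway::{ChannelConfig, Gateway};
use reqwest_middleware::ClientWithMiddleware;

fn main() {
    // Explicit configuration instead of the `Gateway::new()` defaults.
    let gateway = Gateway::builder()
        .with_client(ClientWithMiddleware::from(reqwest::Client::new()))
        .with_channel_config(ChannelConfig::default())
        .with_cache_dir("/tmp/rattler-cache")
        .with_max_concurrent_requests(50)
        .finish();
    let _ = gateway;
}
```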
+#[derive(Default)] +pub struct GatewayBuilder { + channel_config: ChannelConfig, + client: Option<ClientWithMiddleware>, + cache: Option<PathBuf>, + max_concurrent_requests: Option<usize>, +} + +impl GatewayBuilder { + /// New instance of the builder. + pub fn new() -> Self { + Self::default() + } + + /// Set the client to use for fetching repodata. + #[must_use] + pub fn with_client(mut self, client: ClientWithMiddleware) -> Self { + self.set_client(client); + self + } + + /// Set the client to use for fetching repodata. + pub fn set_client(&mut self, client: ClientWithMiddleware) -> &mut Self { + self.client = Some(client); + self + } + + /// Set the channel configuration to use for fetching repodata. + #[must_use] + pub fn with_channel_config(mut self, channel_config: ChannelConfig) -> Self { + self.set_channel_config(channel_config); + self + } + + /// Sets the channel configuration to use for fetching repodata. + pub fn set_channel_config(&mut self, channel_config: ChannelConfig) -> &mut Self { + self.channel_config = channel_config; + self + } + + /// Set the directory to use for caching repodata. + #[must_use] + pub fn with_cache_dir(mut self, cache: impl Into<PathBuf>) -> Self { + self.set_cache_dir(cache); + self + } + + /// Set the directory to use for caching repodata. + pub fn set_cache_dir(&mut self, cache: impl Into<PathBuf>) -> &mut Self { + self.cache = Some(cache.into()); + self + } + + /// Sets the maximum number of concurrent HTTP requests to make. + #[must_use] + pub fn with_max_concurrent_requests(mut self, max_concurrent_requests: usize) -> Self { + self.set_max_concurrent_requests(max_concurrent_requests); + self + } + + /// Sets the maximum number of concurrent HTTP requests to make. + pub fn set_max_concurrent_requests(&mut self, max_concurrent_requests: usize) -> &mut Self { + self.max_concurrent_requests = Some(max_concurrent_requests); + self + } + + /// Finishes the construction of the gateway, returning the constructed gateway. + pub fn finish(self) -> Gateway { + let client = self + .client + .unwrap_or_else(|| ClientWithMiddleware::from(Client::new())); + + let cache = self.cache.unwrap_or_else(|| { + dirs::cache_dir() + .unwrap_or_else(|| PathBuf::from(".")) + .join("rattler/cache") + }); + + let max_concurrent_requests = self.max_concurrent_requests.unwrap_or(100); + Gateway { + inner: Arc::new(GatewayInner { + subdirs: DashMap::default(), + client, + channel_config: self.channel_config, + cache, + concurrent_requests_semaphore: Arc::new(tokio::sync::Semaphore::new( + max_concurrent_requests, + )), + }), + } + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/channel_config.rs b/crates/rattler_repodata_gateway/src/gateway/channel_config.rs new file mode 100644 index 000000000..e9affbb42 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/channel_config.rs @@ -0,0 +1,51 @@ +use crate::fetch::CacheAction; +use rattler_conda_types::Channel; +use std::collections::HashMap; + +/// Describes additional properties that influence how the gateway fetches repodata for a specific +/// channel. +#[derive(Debug, Clone)] +pub struct SourceConfig { + /// When enabled, repodata can be fetched incrementally using JLAP (defaults to true) + pub jlap_enabled: bool, + + /// When enabled, the zstd variant will be used if available (defaults to true) + pub zstd_enabled: bool, + + /// When enabled, the bz2 variant will be used if available (defaults to true) + pub bz2_enabled: bool, + + /// Describes how fetching repodata from a channel should interact with any + /// caches.
+ pub cache_action: CacheAction, +} + +impl Default for SourceConfig { + fn default() -> Self { + Self { + jlap_enabled: true, + zstd_enabled: true, + bz2_enabled: true, + cache_action: CacheAction::default(), + } + } +} + +/// Describes additional information for fetching channels. +#[derive(Debug, Default)] +pub struct ChannelConfig { + /// The default source configuration. If a channel does not have a specific source configuration + /// this configuration will be used. + pub default: SourceConfig, + + /// Describes per channel properties that influence how the gateway fetches repodata. + pub per_channel: HashMap, +} + +impl ChannelConfig { + /// Returns the source configuration for the given channel. If the channel does not have a + /// specific source configuration the default source configuration will be returned. + pub fn get(&self, channel: &Channel) -> &SourceConfig { + self.per_channel.get(channel).unwrap_or(&self.default) + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/error.rs b/crates/rattler_repodata_gateway/src/gateway/error.rs new file mode 100644 index 000000000..265dec4d1 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/error.rs @@ -0,0 +1,101 @@ +use crate::fetch; +use crate::fetch::{FetchRepoDataError, RepoDataNotFoundError}; +use crate::utils::Cancelled; +use rattler_conda_types::Channel; +use rattler_networking::Redact; +use reqwest_middleware::Error; +use std::fmt::{Display, Formatter}; +use std::io; +use thiserror::Error; + +#[derive(Debug, Error)] +#[allow(missing_docs)] +pub enum GatewayError { + #[error("{0}")] + IoError(String, #[source] std::io::Error), + + #[error(transparent)] + ReqwestError(reqwest::Error), + + #[error(transparent)] + ReqwestMiddlewareError(anyhow::Error), + + #[error(transparent)] + FetchRepoDataError(#[from] FetchRepoDataError), + + #[error("{0}")] + UnsupportedUrl(String), + + #[error("{0}")] + Generic(String), + + #[error(transparent)] + SubdirNotFoundError(#[from] SubdirNotFoundError), + + #[error("the operation was cancelled")] + Cancelled, +} + +impl From for GatewayError { + fn from(_: Cancelled) -> Self { + GatewayError::Cancelled + } +} + +impl From for GatewayError { + fn from(value: reqwest_middleware::Error) -> Self { + match value { + Error::Reqwest(err) => err.into(), + Error::Middleware(err) => GatewayError::ReqwestMiddlewareError(err), + } + } +} + +impl From for GatewayError { + fn from(value: reqwest::Error) -> Self { + GatewayError::ReqwestError(value.redact()) + } +} + +#[derive(Debug, Error)] +pub enum HttpOrFilesystemError { + #[error(transparent)] + Http(#[from] reqwest::Error), + + #[error(transparent)] + Filesystem(#[from] io::Error), +} + +impl From for HttpOrFilesystemError { + fn from(value: RepoDataNotFoundError) -> Self { + match value { + RepoDataNotFoundError::HttpError(err) => HttpOrFilesystemError::Http(err), + RepoDataNotFoundError::FileSystemError(err) => HttpOrFilesystemError::Filesystem(err), + } + } +} + +/// An error that is raised when a subdirectory of a repository is not found. +#[derive(Debug, Error)] +pub struct SubdirNotFoundError { + /// The name of the subdirectory that was not found. + pub subdir: String, + + /// The channel that was searched. + pub channel: Channel, + + /// The error that caused the subdirectory to not be found. 
+ #[source] + pub source: HttpOrFilesystemError, +} + +impl Display for SubdirNotFoundError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "could not find subdir '{}' in channel '{}'", + self.subdir, + self.channel.canonical_name() + ) + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/local_subdir.rs b/crates/rattler_repodata_gateway/src/gateway/local_subdir.rs new file mode 100644 index 000000000..0682a7c53 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/local_subdir.rs @@ -0,0 +1,67 @@ +use crate::gateway::error::SubdirNotFoundError; +use crate::gateway::subdir::SubdirClient; +use crate::gateway::GatewayError; +use crate::sparse::SparseRepoData; +use crate::utils::run_blocking_task; +use crate::Reporter; +use rattler_conda_types::{Channel, PackageName, RepoDataRecord}; +use std::path::Path; +use std::sync::Arc; + +/// A client that can be used to fetch repodata for a specific subdirectory from a local directory. +/// +/// Use the [`LocalSubdirClient::from_directory`] function to create a new instance of this client. +pub struct LocalSubdirClient { + sparse: Arc, +} + +impl LocalSubdirClient { + pub async fn from_channel_subdir( + repodata_path: &Path, + channel: Channel, + subdir: &str, + ) -> Result { + let repodata_path = repodata_path.to_path_buf(); + let subdir = subdir.to_string(); + let sparse = run_blocking_task(move || { + SparseRepoData::new(channel.clone(), subdir.clone(), &repodata_path, None).map_err( + |err| { + if err.kind() == std::io::ErrorKind::NotFound { + GatewayError::SubdirNotFoundError(SubdirNotFoundError { + channel: channel.clone(), + subdir: subdir.clone(), + source: err.into(), + }) + } else { + GatewayError::IoError("failed to parse repodata.json".to_string(), err) + } + }, + ) + }) + .await?; + + Ok(Self { + sparse: Arc::new(sparse), + }) + } +} + +#[async_trait::async_trait] +impl SubdirClient for LocalSubdirClient { + async fn fetch_package_records( + &self, + name: &PackageName, + _reporter: Option<&dyn Reporter>, + ) -> Result, GatewayError> { + let sparse_repodata = self.sparse.clone(); + let name = name.clone(); + run_blocking_task(move || match sparse_repodata.load_records(&name) { + Ok(records) => Ok(records.into()), + Err(err) => Err(GatewayError::IoError( + "failed to extract repodata records from sparse repodata".to_string(), + err, + )), + }) + .await + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/mod.rs b/crates/rattler_repodata_gateway/src/gateway/mod.rs new file mode 100644 index 000000000..30c9c3206 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/mod.rs @@ -0,0 +1,406 @@ +mod barrier_cell; +mod builder; +mod channel_config; +mod error; +mod local_subdir; +mod query; +mod remote_subdir; +mod repo_data; +mod sharded_subdir; +mod subdir; + +pub use barrier_cell::BarrierCell; +pub use builder::GatewayBuilder; +pub use channel_config::{ChannelConfig, SourceConfig}; +pub use error::GatewayError; +pub use query::GatewayQuery; +pub use repo_data::RepoData; + +use crate::{fetch::FetchRepoDataError, gateway::error::SubdirNotFoundError, Reporter}; +use dashmap::{mapref::entry::Entry, DashMap}; +use file_url::url_to_path; +use local_subdir::LocalSubdirClient; +use rattler_conda_types::{Channel, MatchSpec, Platform}; +use reqwest_middleware::ClientWithMiddleware; +use std::{ + path::PathBuf, + sync::{Arc, Weak}, +}; +use subdir::{Subdir, SubdirData}; +use tokio::sync::broadcast; +use tracing::instrument; + +/// Central access point for high level queries about 
+/// [`rattler_conda_types::RepoDataRecord`]s from different channels. +/// +/// The gateway is responsible for fetching and caching repodata. Requests are +/// deduplicated which means that if multiple requests are made for the same +/// repodata only the first request will actually fetch the data. All other +/// requests will wait for the first request to complete and then return the +/// same data. +/// +/// The gateway is thread-safe and can be shared between multiple threads. The +/// gateway struct itself uses internal reference counting and is cheaply +/// clonable. There is no need to wrap the gateway in an `Arc`. +#[derive(Clone)] +pub struct Gateway { + inner: Arc, +} + +impl Default for Gateway { + fn default() -> Self { + Gateway::new() + } +} + +impl Gateway { + /// Constructs a simple gateway with the default configuration. Use [`Gateway::builder`] if you + /// want more control over how the gateway is constructed. + pub fn new() -> Self { + Gateway::builder().finish() + } + + /// Constructs a new gateway with the given client and channel configuration. + pub fn builder() -> GatewayBuilder { + GatewayBuilder::default() + } + + /// Constructs a new `GatewayQuery` which can be used to query repodata records. + pub fn query( + &self, + channels: ChannelIter, + platforms: PlatformIter, + specs: PackageNameIter, + ) -> GatewayQuery + where + AsChannel: Into, + ChannelIter: IntoIterator, + PlatformIter: IntoIterator, + ::IntoIter: Clone, + PackageNameIter: IntoIterator, + IntoMatchSpec: Into, + { + GatewayQuery::new( + self.inner.clone(), + channels.into_iter().map(Into::into).collect(), + platforms.into_iter().collect(), + specs.into_iter().map(Into::into).collect(), + ) + } +} + +struct GatewayInner { + /// A map of subdirectories for each channel and platform. + subdirs: DashMap<(Channel, Platform), PendingOrFetched>>, + + /// The client to use to fetch repodata. + client: ClientWithMiddleware, + + /// The channel configuration + channel_config: ChannelConfig, + + /// The directory to store any cache + cache: PathBuf, + + /// A semaphore to limit the number of concurrent requests. + concurrent_requests_semaphore: Arc, +} + +impl GatewayInner { + /// Returns the [`Subdir`] for the given channel and platform. This + /// function will create the [`Subdir`] if it does not exist yet, otherwise + /// it will return the previously created subdir. + /// + /// If multiple threads request the same subdir their requests will be + /// coalesced, and they will all receive the same subdir. If an error + /// occurs while creating the subdir all waiting tasks will also return an + /// error. + #[instrument(skip(self, reporter), err)] + async fn get_or_create_subdir( + &self, + channel: &Channel, + platform: Platform, + reporter: Option>, + ) -> Result, GatewayError> { + let sender = match self.subdirs.entry((channel.clone(), platform)) { + Entry::Vacant(entry) => { + // Construct a sender so other tasks can subscribe + let (sender, _) = broadcast::channel(1); + let sender = Arc::new(sender); + + // Modify the current entry to the pending entry, this is an atomic operation + // because who holds the entry holds mutable access. + entry.insert(PendingOrFetched::Pending(Arc::downgrade(&sender))); + + sender + } + Entry::Occupied(mut entry) => { + let subdir = entry.get(); + match subdir { + PendingOrFetched::Pending(sender) => { + let sender = sender.upgrade(); + + if let Some(sender) = sender { + // Create a receiver before we drop the entry. 
While we hold on to + // the entry we have exclusive access to it, this means the task + // currently fetching the subdir will not be able to store a value + // until we drop the entry. + // By creating the receiver here we ensure that we are subscribed + // before the other task sends a value over the channel. + let mut receiver = sender.subscribe(); + + // Explicitly drop the entry, so we don't block any other tasks. + drop(entry); + + // The sender is still active, so we can wait for the subdir to be + // created. + return match receiver.recv().await { + Ok(subdir) => Ok(subdir), + Err(_) => { + // If this happens the sender was dropped. + Err(GatewayError::IoError( + "a coalesced request failed".to_string(), + std::io::ErrorKind::Other.into(), + )) + } + }; + } else { + // Construct a sender so other tasks can subscribe + let (sender, _) = broadcast::channel(1); + let sender = Arc::new(sender); + + // Modify the current entry to the pending entry, this is an atomic + // operation because whoever holds the entry has mutable access. + entry.insert(PendingOrFetched::Pending(Arc::downgrade(&sender))); + + sender + } + } + PendingOrFetched::Fetched(records) => return Ok(records.clone()), + } + } + }; + + // At this point we have exclusive write access to this specific entry. All other tasks + // will find a pending entry and will wait for the records to become available. + // + // Let's start by creating the subdir. If an error occurs we immediately return the error. + // This will drop the sender and all other waiting tasks will receive an error. + let subdir = Arc::new(self.create_subdir(channel, platform, reporter).await?); + + // Store the fetched files in the entry. + self.subdirs.insert( + (channel.clone(), platform), + PendingOrFetched::Fetched(subdir.clone()), + ); + + // Send the records to all waiting tasks. We don't care if there are no receivers, so we + // drop the error. + let _ = sender.send(subdir.clone()); + + Ok(subdir) + } + + async fn create_subdir( + &self, + channel: &Channel, + platform: Platform, + reporter: Option<Arc<dyn Reporter>>, + ) -> Result<Subdir, GatewayError> { + let url = channel.platform_url(platform); + let subdir_data = if url.scheme() == "file" { + if let Some(path) = url_to_path(&url) { + LocalSubdirClient::from_channel_subdir( + &path.join("repodata.json"), + channel.clone(), + platform.as_str(), + ) + .await + .map(SubdirData::from_client) + } else { + return Err(GatewayError::UnsupportedUrl( + "unsupported file based url".to_string(), + )); + } + } else if url.scheme() == "http" || url.scheme() == "https" { + if url.host_str() == Some("fast.prefiks.dev") + || url.host_str() == Some("fast.prefix.dev") + { + sharded_subdir::ShardedSubdir::new( + channel.clone(), + platform.to_string(), + self.client.clone(), + self.cache.clone(), + self.concurrent_requests_semaphore.clone(), + reporter.as_deref(), + ) + .await + .map(SubdirData::from_client) + } else { + remote_subdir::RemoteSubdirClient::new( + channel.clone(), + platform, + self.client.clone(), + self.cache.clone(), + self.channel_config.get(channel).clone(), + reporter, + ) + .await + .map(SubdirData::from_client) + } + } else { + return Err(GatewayError::UnsupportedUrl(format!( + "'{}' is not a supported scheme", + url.scheme() + ))); + }; + + match subdir_data { + Ok(client) => Ok(Subdir::Found(client)), + Err(GatewayError::SubdirNotFoundError(err)) if platform != Platform::NoArch => { + // If the subdir was not found and the platform is not `noarch` we assume it's just + // empty.
+ tracing::info!( + "subdir {} of channel {} was not found, ignoring", + err.subdir, + err.channel.canonical_name() + ); + Ok(Subdir::NotFound) + } + Err(GatewayError::FetchRepoDataError(FetchRepoDataError::NotFound(err))) => { + Err(SubdirNotFoundError { + subdir: platform.to_string(), + channel: channel.clone(), + source: err.into(), + } + .into()) + } + Err(err) => Err(err), + } + } +} + +/// A record that is either pending or has been fetched. +#[derive(Clone)] +enum PendingOrFetched { + Pending(Weak>), + Fetched(T), +} + +#[cfg(test)] +mod test { + use crate::gateway::Gateway; + use crate::utils::simple_channel_server::SimpleChannelServer; + use crate::GatewayError; + use rattler_conda_types::{Channel, ChannelConfig, PackageName, Platform}; + use rstest::rstest; + use std::path::{Path, PathBuf}; + use std::str::FromStr; + use std::time::Instant; + use url::Url; + + fn local_conda_forge() -> Channel { + Channel::from_directory( + &Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/channels/conda-forge"), + ) + } + + async fn remote_conda_forge() -> SimpleChannelServer { + SimpleChannelServer::new( + Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/channels/conda-forge"), + ) + .await + } + + #[tokio::test] + async fn test_local_gateway() { + let gateway = Gateway::new(); + + let records = gateway + .query( + vec![local_conda_forge()], + vec![Platform::Linux64, Platform::NoArch], + vec![PackageName::from_str("rubin-env").unwrap()].into_iter(), + ) + .recursive(true) + .await + .unwrap(); + + let total_records: usize = records.iter().map(|r| r.len()).sum(); + assert_eq!(total_records, 45060); + } + + #[tokio::test] + async fn test_remote_gateway() { + let gateway = Gateway::new(); + + let index = remote_conda_forge().await; + + let records = gateway + .query( + vec![index.channel()], + vec![Platform::Linux64, Platform::Win32, Platform::NoArch], + vec![PackageName::from_str("rubin-env").unwrap()].into_iter(), + ) + .recursive(true) + .await + .unwrap(); + + let total_records: usize = records.iter().map(|r| r.len()).sum(); + assert_eq!(total_records, 45060); + } + + #[rstest] + #[case::named("non-existing-channel")] + #[case::url("https://conda.anaconda.org/does-not-exist")] + #[case::file_url("file:///does/not/exist")] + #[case::win_path("c:/does-not-exist")] + #[case::unix_path("/does-not-exist")] + #[tokio::test] + async fn test_doesnt_exist(#[case] channel: &str) { + let gateway = Gateway::new(); + + let default_channel_config = ChannelConfig::default_with_root_dir(PathBuf::new()); + let err = gateway + .query( + vec![Channel::from_str(channel, &default_channel_config).unwrap()], + vec![Platform::Linux64, Platform::NoArch], + vec![PackageName::from_str("some-package").unwrap()].into_iter(), + ) + .await; + + assert_matches::assert_matches!(err, Err(GatewayError::SubdirNotFoundError(_))) + } + + #[ignore] + #[tokio::test(flavor = "multi_thread")] + async fn test_sharded_gateway() { + let gateway = Gateway::new(); + + let start = Instant::now(); + let records = gateway + .query( + vec![Channel::from_url( + Url::parse("https://conda.anaconda.org/conda-forge").unwrap(), + )], + vec![Platform::Linux64, Platform::NoArch], + vec![ + // PackageName::from_str("rubin-env").unwrap(), + // PackageName::from_str("jupyterlab").unwrap(), + // PackageName::from_str("detectron2").unwrap(), + PackageName::from_str("python").unwrap(), + PackageName::from_str("boto3").unwrap(), + PackageName::from_str("requests").unwrap(), + ] + .into_iter(), + ) + .recursive(true) + .await + 
.unwrap(); + let end = Instant::now(); + println!("{} records in {:?}", records.len(), end - start); + + let total_records: usize = records.iter().map(|r| r.len()).sum(); + assert_eq!(total_records, 84242); + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/query.rs b/crates/rattler_repodata_gateway/src/gateway/query.rs new file mode 100644 index 000000000..866fabfb9 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/query.rs @@ -0,0 +1,221 @@ +use super::{subdir::Subdir, BarrierCell, GatewayError, GatewayInner, RepoData}; +use crate::Reporter; +use futures::{select_biased, stream::FuturesUnordered, FutureExt, StreamExt}; +use itertools::Itertools; +use rattler_conda_types::{Channel, MatchSpec, PackageName, Platform}; +use std::{ + collections::{HashMap, HashSet}, + future::IntoFuture, + sync::Arc, +}; + +/// Represents a query to execute with a [`Gateway`]. +/// +/// When executed the query will asynchronously load the repodata from all +/// subdirectories (combination of channels and platforms). +/// +/// Most processing will happen in the background so downloading and parsing +/// can happen simultaneously. +/// +/// Repodata is cached by the [`Gateway`] so executing the same query twice +/// with the same channels will not result in the repodata being fetched +/// twice. +#[derive(Clone)] +pub struct GatewayQuery { + /// The gateway that manages all resources + gateway: Arc<GatewayInner>, + + /// The channels to fetch from + channels: Vec<Channel>, + + /// The platforms to fetch from + platforms: Vec<Platform>, + + /// The specs to fetch records for + specs: Vec<MatchSpec>, + + /// Whether to recursively fetch dependencies + recursive: bool, + + /// The reporter to use for the query. + reporter: Option<Arc<dyn Reporter>>, +} + +impl GatewayQuery { + /// Constructs a new instance. This should not be called directly, use + /// [`Gateway::query`] instead. + pub(super) fn new( + gateway: Arc<GatewayInner>, + channels: Vec<Channel>, + platforms: Vec<Platform>, + specs: Vec<MatchSpec>, + ) -> Self { + Self { + gateway, + channels, + platforms, + specs, + + recursive: false, + reporter: None, + } + } + + /// Sets whether the query should be recursive. If recursive is set to true + /// the query will also recursively fetch the dependencies of the packages + /// that match the root specs. + /// + /// Only the dependencies of the records that match the root specs will be + /// fetched. + #[must_use] + pub fn recursive(self, recursive: bool) -> Self { + Self { recursive, ..self } + } + + /// Sets the reporter to use for this query. + /// + /// The reporter is notified of important events during the execution of the + /// query. This allows reporting progress back to a user. + pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self { + Self { + reporter: Some(Arc::new(reporter)), + ..self + } + } + + /// Execute the query and return the resulting repodata records. + pub async fn execute(self) -> Result<Vec<RepoData>, GatewayError> { + // Collect all the channels and platforms together + let channels_and_platforms = self + .channels + .iter() + .enumerate() + .cartesian_product(self.platforms.into_iter()) + .collect_vec(); + + // Create barrier cells for each subdirectory. This can be used to wait until the subdir + // becomes available. + let mut subdirs = Vec::with_capacity(channels_and_platforms.len()); + let mut pending_subdirs = FuturesUnordered::new(); + for ((channel_idx, channel), platform) in channels_and_platforms { + // Create a barrier so work that needs this subdir can await it.
+ let barrier = Arc::new(BarrierCell::new()); + subdirs.push((channel_idx, barrier.clone())); + + let inner = self.gateway.clone(); + let reporter = self.reporter.clone(); + pending_subdirs.push(async move { + match inner + .get_or_create_subdir(channel, platform, reporter) + .await + { + Ok(subdir) => { + barrier.set(subdir).expect("subdir was set twice"); + Ok(()) + } + Err(e) => Err(e), + } + }); + } + + // Package names that we have or will issue requests for. + let mut seen = HashSet::new(); + let mut pending_package_specs = HashMap::new(); + for spec in self.specs { + if let Some(name) = &spec.name { + seen.insert(name.clone()); + pending_package_specs + .entry(name.clone()) + .or_insert_with(Vec::new) + .push(spec); + } + } + + // A list of futures to fetch the records for the pending package names. The main task + // awaits these futures. + let mut pending_records = FuturesUnordered::new(); + + // The resulting list of repodata records. + let mut result = vec![RepoData::default(); self.channels.len()]; + + // Loop until all pending package names have been fetched. + loop { + // Iterate over all pending package names and create futures to fetch them from all + // subdirs. + for (package_name, specs) in pending_package_specs.drain() { + for (channel_idx, subdir) in subdirs.iter().cloned() { + let specs = specs.clone(); + let package_name = package_name.clone(); + let reporter = self.reporter.clone(); + pending_records.push(async move { + let barrier_cell = subdir.clone(); + let subdir = barrier_cell.wait().await; + match subdir.as_ref() { + Subdir::Found(subdir) => subdir + .get_or_fetch_package_records(&package_name, reporter) + .await + .map(|records| (channel_idx, specs, records)), + Subdir::NotFound => Ok((channel_idx, specs, Arc::from(vec![]))), + } + }); + } + } + + // Wait for the subdir to become available. + select_biased! { + // Handle any error that was emitted by the pending subdirs. + subdir_result = pending_subdirs.select_next_some() => { + subdir_result?; + } + + // Handle any records that were fetched + records = pending_records.select_next_some() => { + let (channel_idx, request_specs, records) = records?; + + if self.recursive { + // Extract the dependencies from the records and recursively add them to the + // list of package names that we need to fetch. + for record in records.iter() { + if !request_specs.iter().any(|spec| spec.matches(&record.package_record)) { + // Do not recurse into records that do not match the root spec. + continue; + } + for dependency in &record.package_record.depends { + let dependency_name = PackageName::new_unchecked( + dependency.split_once(' ').unwrap_or((dependency, "")).0, + ); + if seen.insert(dependency_name.clone()) { + pending_package_specs.insert(dependency_name.clone(), vec![dependency_name.into()]); + } + } + } + } + + // Add the records to the result + if !records.is_empty() { + let result = &mut result[channel_idx]; + result.len += records.len(); + result.shards.push(records); + } + } + + // All futures have been handled, all subdirectories have been loaded and all + // repodata records have been fetched.
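For readers unfamiliar with `select_biased!`: arms are polled in order, and the special `complete` arm fires only once every other branch has finished, which is what terminates the loop below. A hedged miniature of that shape:

```rust
use futures::{select_biased, stream::FuturesUnordered, StreamExt};

#[tokio::main]
async fn main() {
    // Two sets of in-flight futures, like `pending_subdirs`/`pending_records`.
    let mut a: FuturesUnordered<_> = (0..2).map(|i| async move { i }).collect();
    let mut b: FuturesUnordered<_> = (10..12).map(|i| async move { i }).collect();

    loop {
        select_biased! {
            // Earlier arms win ties, hence "biased".
            x = a.select_next_some() => println!("a -> {x}"),
            y = b.select_next_some() => println!("b -> {y}"),
            // Fires once both streams are exhausted.
            complete => break,
        }
    }
}
```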
+ complete => { + break; + } + } + } + + Ok(result) + } +} + +impl IntoFuture for GatewayQuery { + type Output = Result<Vec<RepoData>, GatewayError>; + type IntoFuture = futures::future::BoxFuture<'static, Self::Output>; + + fn into_future(self) -> Self::IntoFuture { + self.execute().boxed() + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/remote_subdir.rs b/crates/rattler_repodata_gateway/src/gateway/remote_subdir.rs new file mode 100644 index 000000000..749ddc167 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/remote_subdir.rs @@ -0,0 +1,72 @@ +use super::{local_subdir::LocalSubdirClient, GatewayError, SourceConfig}; +use crate::fetch::{fetch_repo_data, FetchRepoDataError, FetchRepoDataOptions, Variant}; +use crate::gateway::error::SubdirNotFoundError; +use crate::gateway::subdir::SubdirClient; +use crate::Reporter; +use rattler_conda_types::{Channel, PackageName, Platform, RepoDataRecord}; +use reqwest_middleware::ClientWithMiddleware; +use std::{path::PathBuf, sync::Arc}; + +pub struct RemoteSubdirClient { + sparse: LocalSubdirClient, +} + +impl RemoteSubdirClient { + pub async fn new( + channel: Channel, + platform: Platform, + client: ClientWithMiddleware, + cache_dir: PathBuf, + source_config: SourceConfig, + reporter: Option<Arc<dyn Reporter>>, + ) -> Result<Self, GatewayError> { + let subdir_url = channel.platform_url(platform); + + // Fetch the repodata from the remote server + let repodata = fetch_repo_data( + subdir_url, + client, + cache_dir, + FetchRepoDataOptions { + cache_action: source_config.cache_action, + variant: Variant::default(), + jlap_enabled: source_config.jlap_enabled, + zstd_enabled: source_config.zstd_enabled, + bz2_enabled: source_config.bz2_enabled, + }, + reporter, + ) + .await + .map_err(|e| match e { + FetchRepoDataError::NotFound(e) => { + GatewayError::SubdirNotFoundError(SubdirNotFoundError { + channel: channel.clone(), + subdir: platform.to_string(), + source: e.into(), + }) + } + e => GatewayError::FetchRepoDataError(e), + })?; + + // Create a new sparse repodata client that can be used to read records from the repodata. + let sparse = LocalSubdirClient::from_channel_subdir( + &repodata.repo_data_json_path, + channel.clone(), + platform.as_str(), + ) + .await?; + + Ok(Self { sparse }) + } +} + +#[async_trait::async_trait] +impl SubdirClient for RemoteSubdirClient { + async fn fetch_package_records( + &self, + name: &PackageName, + reporter: Option<&dyn Reporter>, + ) -> Result<Arc<[RepoDataRecord]>, GatewayError> { + self.sparse.fetch_package_records(name, reporter).await + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/repo_data.rs b/crates/rattler_repodata_gateway/src/gateway/repo_data.rs new file mode 100644 index 000000000..eac478f5b --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/repo_data.rs @@ -0,0 +1,89 @@ +use rattler_conda_types::RepoDataRecord; +use std::iter::FusedIterator; +use std::sync::Arc; + +/// A container for [`RepoDataRecord`]s that are returned from the [`super::Gateway`]. +/// +/// This struct references the same memory as the `Gateway` and therefore does not +/// duplicate the records in memory. +/// +/// `RepoData` uses internal reference counting, therefore it is relatively +/// cheap to clone. +#[derive(Debug, Default, Clone)] +pub struct RepoData { + pub(crate) shards: Vec<Arc<[RepoDataRecord]>>, + pub(crate) len: usize, +}
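Conceptually, iterating a `RepoData` is just a `flat_map` over its shards; the custom iterator below only avoids the intermediate allocation. A hedged sketch with plain vectors standing in for the `Arc<[RepoDataRecord]>` shards:

```rust
fn main() {
    // Three "shards", one of them empty, like records fetched per subdir.
    let shards: Vec<Vec<&str>> = vec![vec!["a-1.0", "a-2.0"], vec![], vec!["b-1.0"]];

    // `len` is tracked up front, mirroring `RepoData::len`.
    let len: usize = shards.iter().map(Vec::len).sum();

    let records: Vec<&&str> = shards.iter().flat_map(|shard| shard.iter()).collect();
    assert_eq!(records.len(), len);
}
```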
+impl RepoData { + /// Returns an iterator over all the records in this instance. + pub fn iter(&self) -> RepoDataIterator<'_> { + RepoDataIterator { + records: self, + shard_idx: 0, + record_idx: 0, + total: 0, + } + } + + /// Returns the total number of records stored in this instance. + pub fn len(&self) -> usize { + self.len + } + + /// Returns true if there are no records stored in this instance. + pub fn is_empty(&self) -> bool { + self.len == 0 + } +} + +impl<'r> IntoIterator for &'r RepoData { + type Item = &'r RepoDataRecord; + type IntoIter = RepoDataIterator<'r>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +/// An iterator over the records in a [`RepoData`] instance. +pub struct RepoDataIterator<'r> { + records: &'r RepoData, + shard_idx: usize, + record_idx: usize, + total: usize, +} + +impl<'r> Iterator for RepoDataIterator<'r> { + type Item = &'r RepoDataRecord; + + fn next(&mut self) -> Option<Self::Item> { + while self.shard_idx < self.records.shards.len() { + let shard = &self.records.shards[self.shard_idx]; + if self.record_idx < shard.len() { + let record = &shard[self.record_idx]; + self.record_idx += 1; + self.total += 1; + return Some(record); + } else { + self.shard_idx += 1; + self.record_idx = 0; + } + } + + None + } + + fn size_hint(&self) -> (usize, Option<usize>) { + let remaining = self.records.len - self.total; + (remaining, Some(remaining)) + } +} + +impl FusedIterator for RepoDataIterator<'_> {} + +impl ExactSizeIterator for RepoDataIterator<'_> { + fn len(&self) -> usize { + self.records.len - self.total + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs new file mode 100644 index 000000000..acfc38124 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/index.rs @@ -0,0 +1,361 @@ +use super::{token::TokenClient, ShardedRepodata}; +use crate::reporter::ResponseReporterExt; +use crate::utils::run_blocking_task; +use crate::{utils::url_to_cache_filename, GatewayError, Reporter}; +use bytes::Bytes; +use futures::{FutureExt, TryFutureExt}; +use http::{HeaderMap, Method, Uri}; +use http_cache_semantics::{AfterResponse, BeforeRequest, CachePolicy, RequestLike}; +use reqwest::Response; +use reqwest_middleware::ClientWithMiddleware; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::{io::Write, path::Path, str::FromStr, time::SystemTime}; +use tempfile::NamedTempFile; +use tokio::fs::File; +use tokio::io::{AsyncReadExt, BufReader}; +use url::Url; + +/// Magic number that identifies the cache file format. +const MAGIC_NUMBER: &[u8] = b"SHARD-CACHE-V1"; + +const REPODATA_SHARDS_FILENAME: &str = "repodata_shards.msgpack.zst"; + +// Fetches the shard index from the url or reads it from the cache. +pub async fn fetch_index( + client: ClientWithMiddleware, + channel_base_url: &Url, + token_client: &TokenClient, + cache_dir: &Path, + concurrent_requests_semaphore: Arc<tokio::sync::Semaphore>, + reporter: Option<&dyn Reporter>, +) -> Result<ShardedRepodata, GatewayError> { + async fn from_response( + cache_path: &Path, + policy: CachePolicy, + response: Response, + reporter: Option<(&dyn Reporter, usize)>, + ) -> Result<ShardedRepodata, GatewayError> { + // Read the bytes of the response + let response_url = response.url().clone(); + let bytes = response.bytes_with_progress(reporter).await?; + + if let Some((reporter, index)) = reporter { + reporter.on_download_complete(&response_url, index); + } + + // Decompress the bytes + let decoded_bytes = Bytes::from(super::decode_zst_bytes_async(bytes).await?); + + // Write the cache to disk if the policy allows it.
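A hedged, minimal demo of the `http-cache-semantics` calls used in this file, assuming the crate's `RequestLike`/`ResponseLike` impls for the plain `http` types (the URL and `max-age` value are invented):

```rust
use http::{Request, Response};
use http_cache_semantics::{BeforeRequest, CachePolicy};
use std::time::SystemTime;

fn main() {
    let request = Request::get("https://example.com/repodata_shards.msgpack.zst")
        .body(())
        .unwrap();
    let response = Response::builder()
        .header("cache-control", "max-age=300")
        .body(())
        .unwrap();

    // Derive a policy from the request/response pair, as `from_response` does.
    let policy = CachePolicy::new(&request, &response);
    assert!(policy.is_storable()); // `max-age=300` makes the response cacheable

    // Later: decide whether the cached copy can be reused without a request.
    match policy.before_request(&request, SystemTime::now()) {
        BeforeRequest::Fresh(_) => println!("serve from cache"),
        // `request` carries the revalidation headers (e.g. If-None-Match).
        BeforeRequest::Stale { request, .. } => println!("revalidate: {:?}", request.headers),
    }
}
```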
+ let cache_fut = if policy.is_storable() { + write_shard_index_cache(cache_path, policy, decoded_bytes.clone()) + .map_ok(Some) + .map_err(|e| { + GatewayError::IoError( + format!( + "failed to create temporary file to cache shard index to {}", + cache_path.display() + ), + e, + ) + }) + .left_future() + } else { + // Otherwise delete the file + tokio::fs::remove_file(cache_path) + .map_ok_or_else( + |e| { + if e.kind() == std::io::ErrorKind::NotFound { + Ok(None) + } else { + Err(GatewayError::IoError( + format!( + "failed to remove cached shard index at {}", + cache_path.display() + ), + e, + )) + } + }, + |_| Ok(None), + ) + .right_future() + }; + + // Parse the bytes + let parse_fut = run_blocking_task(move || { + rmp_serde::from_slice(&decoded_bytes) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(|e| { + GatewayError::IoError( + format!("failed to parse shard index from {response_url}"), + e, + ) + }) + }); + + // Parse and write the file to disk concurrently + let (temp_file, sharded_index) = tokio::try_join!(cache_fut, parse_fut)?; + + // Persist the cache if successfully updated the cache. + if let Some(temp_file) = temp_file { + temp_file.persist(cache_path).map_err(|e| { + GatewayError::IoError( + format!("failed to persist shard index to {}", cache_path.display()), + e.into(), + ) + })?; + } + + Ok(sharded_index) + } + + // Fetch the sharded repodata from the remote server + let canonical_shards_url = channel_base_url + .join(REPODATA_SHARDS_FILENAME) + .expect("invalid shard base url"); + + let cache_file_name = format!( + "{}.shards-cache-v1", + url_to_cache_filename(&canonical_shards_url) + ); + let cache_path = cache_dir.join(cache_file_name); + + let canonical_request = SimpleRequest::get(&canonical_shards_url); + + // Try reading the cached file + if let Ok((cache_header, file)) = read_cached_index(&cache_path).await { + match cache_header + .policy + .before_request(&canonical_request, SystemTime::now()) + { + BeforeRequest::Fresh(_) => { + if let Ok(shard_index) = read_shard_index_from_reader(file).await { + tracing::debug!("shard index cache hit"); + return Ok(shard_index); + } + } + BeforeRequest::Stale { + request: state_request, + .. + } => { + // Get the token from the token client + let token = token_client.get_token(reporter).await?; + + // Determine the actual URL to use for the request + let shards_url = token + .shard_base_url + .as_ref() + .unwrap_or(channel_base_url) + .join(REPODATA_SHARDS_FILENAME) + .expect("invalid shard base url"); + + // Construct the actual request that we will send + let mut request = client + .get(shards_url.clone()) + .headers(state_request.headers().clone()) + .build() + .expect("failed to build request for shard index"); + token.add_to_headers(request.headers_mut()); + + // Acquire a permit to do a request + let _request_permit = concurrent_requests_semaphore.acquire().await; + + // Send the request + let download_reporter = reporter.map(|r| (r, r.on_download_start(&shards_url))); + let response = client.execute(request).await?; + + match cache_header.policy.after_response( + &state_request, + &response, + SystemTime::now(), + ) { + AfterResponse::NotModified(_policy, _) => { + // The cached file is still valid + match read_shard_index_from_reader(file).await { + Ok(shard_index) => { + tracing::debug!("shard index cache was not modified"); + // If reading the file failed for some reason we'll just fetch it again. 
+ return Ok(shard_index); + } + Err(e) => { + tracing::warn!("the cached shard index has been corrupted: {e}"); + if let Some((reporter, index)) = download_reporter { + reporter.on_download_complete(response.url(), index); + } + } + } + } + AfterResponse::Modified(policy, _) => { + // Close the old file so we can create a new one. + drop(file); + + tracing::debug!("shard index cache has become stale"); + return from_response(&cache_path, policy, response, download_reporter) + .await; + } + } + } + } + }; + + tracing::debug!("fetching fresh shard index"); + + // Get the token from the token client + let token = token_client.get_token(reporter).await?; + + // Determine the actual URL to use for the request + let shards_url = token + .shard_base_url + .as_ref() + .unwrap_or(channel_base_url) + .join(REPODATA_SHARDS_FILENAME) + .expect("invalid shard base url"); + + // Construct the actual request that we will send + let mut request = client + .get(shards_url.clone()) + .build() + .expect("failed to build request for shard index"); + token.add_to_headers(request.headers_mut()); + + // Acquire a permit to do a request + let _request_permit = concurrent_requests_semaphore.acquire().await; + + // Do a fresh request + let reporter = reporter.map(|r| (r, r.on_download_start(&shards_url))); + let response = client + .execute( + request + .try_clone() + .expect("failed to clone initial request"), + ) + .await?; + + let policy = CachePolicy::new(&canonical_request, &response); + from_response(&cache_path, policy, response, reporter).await +} + +/// Writes the shard index cache to disk. +async fn write_shard_index_cache( + cache_path: &Path, + policy: CachePolicy, + decoded_bytes: Bytes, +) -> std::io::Result<NamedTempFile> { + let cache_path = cache_path.to_path_buf(); + tokio::task::spawn_blocking(move || { + // Write the header + let cache_header = rmp_serde::encode::to_vec(&CacheHeader { policy }) + .expect("failed to encode cache header"); + let cache_dir = cache_path + .parent() + .expect("the cache path must have a parent"); + std::fs::create_dir_all(cache_dir)?; + let mut temp_file = tempfile::Builder::new() + .tempfile_in(cache_dir) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + temp_file.write_all(MAGIC_NUMBER)?; + temp_file.write_all(&(cache_header.len() as u32).to_le_bytes())?; + temp_file.write_all(&cache_header)?; + temp_file.write_all(decoded_bytes.as_ref())?; + + Ok(temp_file) + }) + .map_err(|e| match e.try_into_panic() { + Ok(payload) => std::panic::resume_unwind(payload), + Err(e) => std::io::Error::new(std::io::ErrorKind::Other, e), + }) + .await? +} + +/// Read the shard index from a reader and deserialize it. +async fn read_shard_index_from_reader( + mut reader: BufReader<File>, +) -> Result<ShardedRepodata, GatewayError> { + // Read the file to memory + let mut bytes = Vec::new(); + reader + .read_to_end(&mut bytes) + .await + .map_err(|e| GatewayError::IoError("failed to read shard index buffer".to_string(), e))?; + + // Deserialize the bytes + run_blocking_task(move || { + rmp_serde::from_slice(&bytes) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(|e| GatewayError::IoError("failed to parse shard index".to_string(), e)) + }) + .await +} + +/// Cache information stored at the start of the cache file. +#[derive(Clone, Debug, Serialize, Deserialize)] +struct CacheHeader { + pub policy: CachePolicy, +} + +/// Try reading the cache file from disk.
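Both the `CacheHeader` above and the shard index itself travel as MessagePack via `rmp-serde`; a hedged round-trip with a stand-in struct (not the real header) shows the encode/decode pair used here:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Header {
    version: u32,
    name: String,
}

fn main() {
    let header = Header { version: 1, name: "shard-cache".to_string() };

    // Same calls as the cache code: `encode::to_vec` out, `from_slice` back.
    let bytes = rmp_serde::encode::to_vec(&header).expect("failed to encode");
    let decoded: Header = rmp_serde::from_slice(&bytes).expect("failed to decode");
    assert_eq!(header, decoded);
}
```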
+async fn read_cached_index(cache_path: &Path) -> std::io::Result<(CacheHeader, BufReader<File>)> { + // Open the file for reading + let file = File::open(cache_path).await?; + let mut reader = BufReader::new(file); + + // Read the magic from the file + let mut magic_number = [0; MAGIC_NUMBER.len()]; + reader.read_exact(&mut magic_number).await?; + if magic_number != MAGIC_NUMBER { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "invalid magic number", + )); + } + + // Read the length of the header + let header_length = reader.read_u32_le().await? as usize; + + // Read the header from the file + let mut header_bytes = vec![0; header_length]; + reader.read_exact(&mut header_bytes).await?; + + // Deserialize the header + let cache_header = rmp_serde::from_slice::<CacheHeader>(&header_bytes) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?; + + Ok((cache_header, reader)) +} + +/// A helper struct to make it easier to construct something that implements [`RequestLike`]. +struct SimpleRequest { + uri: Uri, + method: Method, + headers: HeaderMap, +} + +impl SimpleRequest { + pub fn get(url: &Url) -> Self { + Self { + uri: Uri::from_str(url.as_str()).expect("failed to convert Url to Uri"), + method: Method::GET, + headers: HeaderMap::default(), + } + } +} + +impl RequestLike for SimpleRequest { + fn method(&self) -> &Method { + &self.method + } + + fn uri(&self) -> Uri { + self.uri.clone() + } + + fn headers(&self) -> &HeaderMap { + &self.headers + } + + fn is_same_uri(&self, other: &Uri) -> bool { + &self.uri() == other + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs new file mode 100644 index 000000000..f1e279c4c --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/mod.rs @@ -0,0 +1,234 @@ +use crate::gateway::error::SubdirNotFoundError; +use crate::reporter::ResponseReporterExt; +use crate::utils::run_blocking_task; +use crate::Reporter; +use crate::{fetch::FetchRepoDataError, gateway::subdir::SubdirClient, GatewayError}; +use futures::TryFutureExt; +use http::header::CACHE_CONTROL; +use http::{HeaderValue, StatusCode}; +use rattler_conda_types::{Channel, PackageName, RepoDataRecord, Shard, ShardedRepodata}; +use reqwest_middleware::ClientWithMiddleware; +use std::{borrow::Cow, path::PathBuf, sync::Arc}; +use token::TokenClient; +use url::Url; + +mod index; +mod token; + +pub struct ShardedSubdir { + channel: Channel, + client: ClientWithMiddleware, + shard_base_url: Url, + token_client: TokenClient, + sharded_repodata: ShardedRepodata, + cache_dir: PathBuf, + concurrent_requests_semaphore: Arc<tokio::sync::Semaphore>, +} + +impl ShardedSubdir { + pub async fn new( + channel: Channel, + subdir: String, + client: ClientWithMiddleware, + cache_dir: PathBuf, + concurrent_requests_semaphore: Arc<tokio::sync::Semaphore>, + reporter: Option<&dyn Reporter>, + ) -> Result<Self, GatewayError> { + // Construct the base url for the shards (e.g. `<channel>/<subdir>/`). + let shard_base_url = add_trailing_slash(channel.base_url()) + .join(&format!("{subdir}/")) + .expect("invalid subdir url"); + + // Construct a token client to fetch the token when we need it.
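The token endpoint is assumed to return a small JSON document whose fields mirror the `Token` struct defined later in this diff; the body below is invented and the field types are simplified for a self-contained sketch:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Token {
    token: Option<String>,
    issued_at: Option<String>, // `DateTime<Utc>` in the real struct
    expires_in: Option<u64>,
    shard_base_url: Option<String>, // `Url` in the real struct
}

fn main() {
    let body = r#"{ "token": "abc123", "expires_in": 3600 }"#;
    let token: Token = serde_json::from_str(body).unwrap();
    assert_eq!(token.expires_in, Some(3600));
    // `issued_at` is absent here; the client backfills it with `Utc::now()`.
    assert!(token.issued_at.is_none());
}
```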
+ let token_client = TokenClient::new( + client.clone(), + shard_base_url.clone(), + concurrent_requests_semaphore.clone(), + ); + + // Fetch the shard index + let sharded_repodata = index::fetch_index( + client.clone(), + &shard_base_url, + &token_client, + &cache_dir, + concurrent_requests_semaphore.clone(), + reporter, + ) + .await + .map_err(|e| match e { + GatewayError::ReqwestError(e) if e.status() == Some(StatusCode::NOT_FOUND) => { + GatewayError::SubdirNotFoundError(SubdirNotFoundError { + channel: channel.clone(), + subdir, + source: e.into(), + }) + } + e => e, + })?; + + // Determine the cache directory and make sure it exists. + let cache_dir = cache_dir.join("shards-v1"); + tokio::fs::create_dir_all(&cache_dir) + .await + .map_err(FetchRepoDataError::IoError)?; + + Ok(Self { + channel, + client, + shard_base_url, + token_client, + sharded_repodata, + cache_dir, + concurrent_requests_semaphore, + }) + } +} + +#[async_trait::async_trait] +impl SubdirClient for ShardedSubdir { + async fn fetch_package_records( + &self, + name: &PackageName, + reporter: Option<&dyn Reporter>, + ) -> Result<Arc<[RepoDataRecord]>, GatewayError> { + // Find the shard that contains the package + let Some(shard) = self.sharded_repodata.shards.get(name.as_normalized()) else { + return Ok(vec![].into()); + }; + + // Check if we already have the shard in the cache. + let shard_cache_path = self.cache_dir.join(format!("{shard:x}.msgpack")); + + // Read the cached shard + match tokio::fs::read(&shard_cache_path).await { + Ok(cached_bytes) => { + // Decode the cached shard + return parse_records( + cached_bytes, + self.channel.canonical_name(), + self.sharded_repodata.info.base_url.clone(), + ) + .await + .map(Arc::from); + } + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + // The file is missing from the cache, we need to download it. + } + Err(err) => return Err(FetchRepoDataError::IoError(err).into()), + } + + // Get the token + let token = self.token_client.get_token(reporter).await?; + + // Download the shard + let shard_url = token + .shard_base_url + .as_ref() + .unwrap_or(&self.shard_base_url) + .join(&format!("shards/{shard:x}.msgpack.zst")) + .expect("invalid shard url"); + + let mut shard_request = self + .client + .get(shard_url.clone()) + .header(CACHE_CONTROL, HeaderValue::from_static("no-store")) + .build() + .expect("failed to build shard request"); + token.add_to_headers(shard_request.headers_mut()); + + let shard_bytes = { + let _permit = self.concurrent_requests_semaphore.acquire().await; + let reporter = reporter.map(|r| (r, r.on_download_start(&shard_url))); + let shard_response = self + .client + .execute(shard_request) + .await + .and_then(|r| r.error_for_status().map_err(Into::into)) + .map_err(FetchRepoDataError::from)?; + + let bytes = shard_response + .bytes_with_progress(reporter) + .await + .map_err(FetchRepoDataError::from)?; + + if let Some((reporter, index)) = reporter { + reporter.on_download_complete(&shard_url, index); + } + + bytes + }; + + let shard_bytes = decode_zst_bytes_async(shard_bytes).await?; + + // Create a future to write the cached bytes to disk + let write_to_cache_fut = tokio::fs::write(&shard_cache_path, shard_bytes.clone()) + .map_err(FetchRepoDataError::IoError) + .map_err(GatewayError::from); + + // Create a future to parse the records from the shard + let parse_records_fut = parse_records( + shard_bytes, + self.channel.canonical_name(), + self.sharded_repodata.info.base_url.clone(), + ); + + // Await both futures concurrently.
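A hedged miniature of the concurrent step below: `tokio::try_join!` polls both futures at once and fails fast if either errors, so the cache write never delays parsing. The `/tmp` path is invented for the sketch:

```rust
#[tokio::main]
async fn main() -> std::io::Result<()> {
    // Stand-ins for `write_to_cache_fut` and `parse_records_fut`.
    let write = async { tokio::fs::write("/tmp/shard.msgpack", b"demo").await };
    let parse = async { Ok::<usize, std::io::Error>(4) };

    let ((), record_count) = tokio::try_join!(write, parse)?;
    println!("cached shard with {record_count} records");
    Ok(())
}
```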
+ let (_, records) = tokio::try_join!(write_to_cache_fut, parse_records_fut)?; + + Ok(records.into()) + } +} + +async fn decode_zst_bytes_async<R: AsRef<[u8]> + Send + 'static>( + bytes: R, +) -> Result<Vec<u8>, GatewayError> { + run_blocking_task(move || match zstd::decode_all(bytes.as_ref()) { + Ok(decoded) => Ok(decoded), + Err(err) => Err(GatewayError::IoError( + "failed to decode zstd shard".to_string(), + err, + )), + }) + .await +} + +async fn parse_records<R: AsRef<[u8]> + Send + 'static>( + bytes: R, + channel_name: String, + base_url: Url, +) -> Result<Vec<RepoDataRecord>, GatewayError> { + run_blocking_task(move || { + // let shard = serde_json::from_slice::<Shard>(bytes.as_ref()).map_err(std::io::Error::from)?; + let shard = rmp_serde::from_slice::<Shard>(bytes.as_ref()) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())) + .map_err(FetchRepoDataError::IoError)?; + let packages = + itertools::chain(shard.packages.into_iter(), shard.conda_packages.into_iter()) + .filter(|(name, _record)| !shard.removed.contains(name)); + let base_url = add_trailing_slash(&base_url); + Ok(packages + .map(|(file_name, package_record)| RepoDataRecord { + url: base_url + .join(&file_name) + .expect("filename is not a valid url"), + channel: channel_name.clone(), + package_record, + file_name, + }) + .collect()) + }) + .await +} + +/// Returns the URL with a trailing slash if it doesn't already have one. +fn add_trailing_slash(url: &Url) -> Cow<'_, Url> { + let path = url.path(); + if path.ends_with('/') { + Cow::Borrowed(url) + } else { + let mut url = url.clone(); + url.set_path(&format!("{path}/")); + Cow::Owned(url) + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/token.rs b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/token.rs new file mode 100644 index 000000000..bd4a98c51 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/sharded_subdir/token.rs @@ -0,0 +1,166 @@ +use crate::reporter::ResponseReporterExt; +use crate::Reporter; +use crate::{fetch::FetchRepoDataError, gateway::PendingOrFetched, GatewayError}; +use chrono::{DateTime, TimeDelta, Utc}; +use http::header::CACHE_CONTROL; +use http::HeaderValue; +use itertools::Either; +use parking_lot::Mutex; +use reqwest_middleware::ClientWithMiddleware; +use serde::{Deserialize, Serialize}; +use std::ops::Add; +use std::sync::Arc; +use url::Url; + +/// A simple client to fetch a token from the token endpoint. +pub struct TokenClient { + client: ClientWithMiddleware, + token_base_url: Url, + token: Arc<Mutex<PendingOrFetched<Option<Arc<Token>>>>>, + concurrent_request_semaphore: Arc<tokio::sync::Semaphore>, +} + +impl TokenClient { + pub fn new( + client: ClientWithMiddleware, + token_base_url: Url, + concurrent_request_semaphore: Arc<tokio::sync::Semaphore>, + ) -> Self { + Self { + client, + token_base_url, + token: Arc::new(Mutex::new(PendingOrFetched::Fetched(None))), + concurrent_request_semaphore, + } + } + + /// Returns the current token or fetches a new one if the current one is expired. + pub async fn get_token( + &self, + reporter: Option<&dyn Reporter>, + ) -> Result<Arc<Token>, GatewayError> { + let sender_or_receiver = { + let mut token = self.token.lock(); + match &*token { + PendingOrFetched::Fetched(Some(token)) if token.is_fresh() => { + // The token is still fresh.
+ return Ok(token.clone()); + } + PendingOrFetched::Fetched(_) => { + let (sender, _) = tokio::sync::broadcast::channel(1); + let sender = Arc::new(sender); + *token = PendingOrFetched::Pending(Arc::downgrade(&sender)); + + Either::Left(sender) + } + PendingOrFetched::Pending(sender) => { + let sender = sender.upgrade(); + if let Some(sender) = sender { + Either::Right(sender.subscribe()) + } else { + let (sender, _) = tokio::sync::broadcast::channel(1); + let sender = Arc::new(sender); + *token = PendingOrFetched::Pending(Arc::downgrade(&sender)); + Either::Left(sender) + } + } + } + }; + + let sender = match sender_or_receiver { + Either::Left(sender) => sender, + Either::Right(mut receiver) => { + return match receiver.recv().await { + Ok(Some(token)) => Ok(token), + _ => { + // If this happens the sender was dropped. + Err(GatewayError::IoError( + "a coalesced request for a token failed".to_string(), + std::io::ErrorKind::Other.into(), + )) + } + }; + } + }; + + let token_url = self + .token_base_url + .join("token") + .expect("invalid token url"); + tracing::debug!("fetching token from {}", &token_url); + + // Fetch the token + let token = { + let _permit = self.concurrent_request_semaphore.acquire().await; + let reporter = reporter.map(|r| (r, r.on_download_start(&token_url))); + let response = self + .client + .get(token_url.clone()) + .header(CACHE_CONTROL, HeaderValue::from_static("max-age=0")) + .send() + .await + .and_then(|r| r.error_for_status().map_err(Into::into)) + .map_err(GatewayError::from)?; + + let bytes = response + .bytes_with_progress(reporter) + .await + .map_err(FetchRepoDataError::from) + .map_err(GatewayError::from)?; + + if let Some((reporter, index)) = reporter { + reporter.on_download_complete(&token_url, index); + } + + let mut token: Token = serde_json::from_slice(&bytes).map_err(|e| { + GatewayError::IoError("failed to parse sharded index token".to_string(), e.into()) + })?; + + // Ensure that the issued_at field is set. + token.issued_at.get_or_insert_with(Utc::now); + + Arc::new(token) + }; + + // Reacquire the lock and store the fetched token. + let mut token_lock = self.token.lock(); + *token_lock = PendingOrFetched::Fetched(Some(token.clone())); + + // Publish the change + let _ = sender.send(Some(token.clone())); + + Ok(token) + } +} + +/// The token endpoint response. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Token { + pub token: Option<String>, + issued_at: Option<DateTime<Utc>>, + expires_in: Option<u64>, + pub shard_base_url: Option<Url>, +}
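The freshness rule implemented next is just `issued_at + expires_in > now`; a hedged, self-contained check of that arithmetic with chrono (the one-hour lifetime is invented):

```rust
use chrono::{TimeDelta, Utc};
use std::ops::Add;

fn main() {
    let issued_at = Utc::now();
    let expires_in: u64 = 3600;

    // Fresh while the expiry instant still lies in the future.
    let expires_at = issued_at.add(TimeDelta::seconds(expires_in as i64));
    assert!(expires_at > Utc::now()); // issued just now, well within the hour
}
```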
+impl Token { + /// Returns true if the token is still considered to be valid. + pub fn is_fresh(&self) -> bool { + if let (Some(issued_at), Some(expires_in)) = (&self.issued_at, self.expires_in) { + let expires_at = issued_at.add(TimeDelta::seconds(expires_in as i64)); + return expires_at > Utc::now(); + } + true + } + + /// Add the token to the headers if it's available + pub fn add_to_headers(&self, headers: &mut http::header::HeaderMap) { + if let Some(token) = &self.token { + headers.insert( + http::header::AUTHORIZATION, + HeaderValue::from_str(&format!("Bearer {token}")).unwrap(), + ); + } + } +} diff --git a/crates/rattler_repodata_gateway/src/gateway/subdir.rs b/crates/rattler_repodata_gateway/src/gateway/subdir.rs new file mode 100644 index 000000000..df57352e5 --- /dev/null +++ b/crates/rattler_repodata_gateway/src/gateway/subdir.rs @@ -0,0 +1,149 @@ +use super::GatewayError; +use crate::gateway::PendingOrFetched; +use crate::Reporter; +use dashmap::mapref::entry::Entry; +use dashmap::DashMap; +use rattler_conda_types::{PackageName, RepoDataRecord}; +use std::sync::Arc; +use tokio::{sync::broadcast, task::JoinError}; + +pub enum Subdir { + /// The subdirectory is missing from the channel, it is considered empty. + NotFound, + + /// A subdirectory and the data associated with it. + Found(SubdirData), +} + +/// Fetches and caches repodata records by package name for a specific subdirectory of a channel. +pub struct SubdirData { + /// The client to use to fetch repodata. + client: Arc<dyn SubdirClient>, + + /// Previously fetched or currently pending records. + records: DashMap<PackageName, PendingOrFetched<Arc<[RepoDataRecord]>>>, +} + +impl SubdirData { + pub fn from_client<C: SubdirClient + 'static>(client: C) -> Self { + Self { + client: Arc::new(client), + records: DashMap::default(), + } + } + + pub async fn get_or_fetch_package_records( + &self, + name: &PackageName, + reporter: Option<Arc<dyn Reporter>>, + ) -> Result<Arc<[RepoDataRecord]>, GatewayError> { + let sender = match self.records.entry(name.clone()) { + Entry::Vacant(entry) => { + // Construct a sender so other tasks can subscribe + let (sender, _) = broadcast::channel(1); + let sender = Arc::new(sender); + + // Modify the current entry to the pending entry, this is an atomic operation + // because whoever holds the entry holds mutable access. + entry.insert(PendingOrFetched::Pending(Arc::downgrade(&sender))); + + sender + } + Entry::Occupied(mut entry) => { + let records = entry.get(); + match records { + PendingOrFetched::Pending(sender) => { + let sender = sender.upgrade(); + + if let Some(sender) = sender { + // Create a receiver before we drop the entry. While we hold on to + // the entry we have exclusive access to it, this means the task + // currently fetching the package will not be able to store a value + // until we drop the entry. + // By creating the receiver here we ensure that we are subscribed + // before the other task sends a value over the channel. + let mut receiver = sender.subscribe(); + + // Explicitly drop the entry, so we don't block any other tasks. + drop(entry); + + // The sender is still active, so we can wait for the records to be + // fetched. + return match receiver.recv().await { + Ok(records) => Ok(records), + Err(_) => { + // If this happens the sender was dropped. We simply have to + // retry. + Err(GatewayError::IoError( + "a coalesced request failed".to_string(), + std::io::ErrorKind::Other.into(), + )) + } + }; + } else { + // Construct a sender so other tasks can subscribe + let (sender, _) = broadcast::channel(1); + let sender = Arc::new(sender); + + // Modify the current entry to the pending entry, this is an atomic + // operation because whoever holds the entry holds mutable access.
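A hedged sketch of the entry-based claim used here: whichever task sees `Vacant` installs a pending marker and becomes the fetcher, and holding the entry guarantees no other task can race the insert:

```rust
use dashmap::{mapref::entry::Entry, DashMap};

fn main() {
    let map: DashMap<&str, &str> = DashMap::new();

    match map.entry("python") {
        Entry::Vacant(entry) => {
            // Holding the entry gives exclusive access, so this insert is atomic.
            entry.insert("pending");
            println!("claimed the fetch");
        }
        Entry::Occupied(entry) => println!("already {}", entry.get()),
    }

    assert_eq!(*map.get("python").unwrap(), "pending");
}
```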
+ entry.insert(PendingOrFetched::Pending(Arc::downgrade(&sender))); + + sender + } + } + PendingOrFetched::Fetched(records) => return Ok(records.clone()), + } + } + }; + + // At this point we have exclusive write access to this specific entry. All other tasks + // will find a pending entry and will wait for the records to become available. + // + // Let's start by fetching the records. If an error occurs we immediately return the error. + // This will drop the sender and all other waiting tasks will receive an error. + let records = match tokio::spawn({ + let client = self.client.clone(); + let name = name.clone(); + async move { + client + .fetch_package_records(&name, reporter.as_deref()) + .await + } + }) + .await + .map_err(JoinError::try_into_panic) + { + Ok(Ok(records)) => records, + Ok(Err(err)) => return Err(err), + Err(Ok(panic)) => std::panic::resume_unwind(panic), + Err(Err(_)) => { + return Err(GatewayError::IoError( + "fetching records was cancelled".to_string(), + std::io::ErrorKind::Interrupted.into(), + )); + } + }; + + // Store the fetched files in the entry. + self.records + .insert(name.clone(), PendingOrFetched::Fetched(records.clone())); + + // Send the records to all waiting tasks. We don't care if there are no receivers so we + // drop the error. + let _ = sender.send(records.clone()); + + Ok(records) + } +} + +/// A client that can be used to fetch repodata for a specific subdirectory. +#[async_trait::async_trait] +pub trait SubdirClient: Send + Sync { + /// Fetches all repodata records for the package with the given name in a channel subdirectory. + async fn fetch_package_records( + &self, + name: &PackageName, + reporter: Option<&dyn Reporter>, + ) -> Result, GatewayError>; +} diff --git a/crates/rattler_repodata_gateway/src/lib.rs b/crates/rattler_repodata_gateway/src/lib.rs index 9fabf6575..e86eaf556 100644 --- a/crates/rattler_repodata_gateway/src/lib.rs +++ b/crates/rattler_repodata_gateway/src/lib.rs @@ -49,7 +49,7 @@ //! let result = match result { //! Err(err) => { //! panic!("{:?}", err); -//! }, +//! } //! Ok(result) => result //! }; //! @@ -61,7 +61,14 @@ //! ``` pub mod fetch; +mod reporter; #[cfg(feature = "sparse")] pub mod sparse; - mod utils; +pub use reporter::Reporter; + +#[cfg(feature = "gateway")] +mod gateway; + +#[cfg(feature = "gateway")] +pub use gateway::{ChannelConfig, Gateway, GatewayBuilder, GatewayError, RepoData, SourceConfig}; diff --git a/crates/rattler_repodata_gateway/src/reporter.rs b/crates/rattler_repodata_gateway/src/reporter.rs new file mode 100644 index 000000000..6a8c8ccdd --- /dev/null +++ b/crates/rattler_repodata_gateway/src/reporter.rs @@ -0,0 +1,114 @@ +use crate::utils::BodyStreamExt; +use bytes::Bytes; +use futures::{Stream, TryStreamExt}; +use std::future::Future; +use url::Url; + +/// A trait that enables being notified of download progress. +pub trait Reporter: Send + Sync { + /// Called when a download of a file started. + /// + /// Returns an index that can be used to identify the download in subsequent calls. + fn on_download_start(&self, _url: &Url) -> usize { + 0 + } + + /// Called when the download of a file makes any progress. + /// + /// The `total_bytes` parameter is `None` if the total size of the file is unknown. + /// + /// The `index` parameter is the index returned by `on_download_start`. + fn on_download_progress( + &self, + _url: &Url, + _index: usize, + _bytes_downloaded: usize, + _total_bytes: Option, + ) { + } + + /// Called when the download of a file finished. 
+ /// + /// The `index` parameter is the index returned by `on_download_start`. + fn on_download_complete(&self, _url: &Url, _index: usize) {} + + /// Called when starting to apply JLAP to existing repodata. + /// + /// This function should return a unique index that can be used to + /// identify the subsequent JLAP operation. + fn on_jlap_start(&self) -> usize { + 0 + } + + /// Called when reading and decoding the repodata started. + fn on_jlap_decode_start(&self, _index: usize) {} + + /// Called when reading and decoding the repodata completed. + fn on_jlap_decode_completed(&self, _index: usize) {} + + /// Called when starting to apply a JLAP patch. + fn on_jlap_apply_patch(&self, _index: usize, _patch_index: usize, _total: usize) {} + + /// Called when all JLAP patches have been applied. + fn on_jlap_apply_patches_completed(&self, _index: usize) {} + + /// Called when reading and decoding the repodata started. + fn on_jlap_encode_start(&self, _index: usize) {} + + /// Called when reading and decoding the repodata completed. + fn on_jlap_encode_completed(&self, _index: usize) {} + + /// Called when finished applying JLAP to existing repodata. + fn on_jlap_completed(&self, _index: usize) {} +} + +pub(crate) trait ResponseReporterExt { + /// Converts a response into a stream of bytes, notifying a reporter of the progress. + fn byte_stream_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Stream>; + + /// Reads all the bytes from a stream and notifies a reporter of the progress. + fn bytes_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Future>>; + + /// Reads all the bytes from a stream and convert it to text and notifies a reporter of the progress. + fn text_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Future>; +} + +impl ResponseReporterExt for reqwest::Response { + fn byte_stream_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Stream> { + let total_size = self.content_length().map(|len| len as usize); + let url = self.url().clone(); + let mut bytes_read = 0; + self.bytes_stream().inspect_ok(move |bytes| { + if let Some((reporter, index)) = reporter { + bytes_read += bytes.len(); + reporter.on_download_progress(&url, index, bytes_read, total_size); + } + }) + } + + fn bytes_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Future>> { + self.byte_stream_with_progress(reporter).bytes() + } + + fn text_with_progress( + self, + reporter: Option<(&dyn Reporter, usize)>, + ) -> impl Future> { + self.byte_stream_with_progress(reporter).text() + } +} diff --git a/crates/rattler_repodata_gateway/src/sparse/mod.rs b/crates/rattler_repodata_gateway/src/sparse/mod.rs index 284c0c0e6..e7b5571af 100644 --- a/crates/rattler_repodata_gateway/src/sparse/mod.rs +++ b/crates/rattler_repodata_gateway/src/sparse/mod.rs @@ -550,6 +550,18 @@ mod test { assert_eq!(total_records, 21732); } + #[tokio::test] + async fn test_sparse_rubin_env() { + let sparse_empty_data = load_sparse(["rubin-env"]).await; + + let total_records = sparse_empty_data + .iter() + .map(std::vec::Vec::len) + .sum::(); + + assert_eq!(total_records, 45060); + } + #[tokio::test] async fn test_sparse_numpy_dev() { let package_names = vec![ diff --git a/crates/rattler_repodata_gateway/src/utils/body.rs b/crates/rattler_repodata_gateway/src/utils/body.rs new file mode 100644 index 000000000..1d7e15add --- /dev/null +++ b/crates/rattler_repodata_gateway/src/utils/body.rs @@ -0,0 +1,75 @@ +use 
bytes::Bytes; +use futures::Stream; +use pin_project_lite::pin_project; +use std::{ + collections::VecDeque, + future::Future, + marker::PhantomData, + pin::Pin, + task::{Context, Poll}, +}; + +/// A helper trait to convert a stream of bytes coming from a request body into +/// another type. +pub trait BodyStreamExt: Sized { + fn bytes(self) -> BytesCollect; + + /// Read the contents of a body stream as text. + async fn text(self) -> Result; +} + +impl>> BodyStreamExt for S { + fn bytes(self) -> BytesCollect { + BytesCollect::new(self) + } + + async fn text(self) -> Result { + let full = self.bytes().await?; + let text = String::from_utf8_lossy(&full); + Ok(text.into_owned()) + } +} + +pin_project! { + #[project = BytesCollectProj] + pub struct BytesCollect { + #[pin] + stream: S, + bytes: VecDeque, + _err: PhantomData, + } +} + +impl BytesCollect { + pub fn new(stream: S) -> Self { + Self { + stream, + bytes: VecDeque::new(), + _err: PhantomData, + } + } +} + +impl>> Future for BytesCollect { + type Output = Result, E>; + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let mut this = self.project(); + loop { + match this.stream.as_mut().poll_next(cx) { + Poll::Ready(Some(Ok(chunk))) => { + this.bytes.push_back(chunk); + } + Poll::Ready(Some(Err(e))) => return Poll::Ready(Err(e)), + Poll::Ready(None) => { + let mut result = Vec::with_capacity(this.bytes.iter().map(Bytes::len).sum()); + for chunk in this.bytes.iter() { + result.extend_from_slice(chunk); + } + return Poll::Ready(Ok(result)); + } + Poll::Pending => return Poll::Pending, + } + } + } +} diff --git a/crates/rattler_repodata_gateway/src/utils/mod.rs b/crates/rattler_repodata_gateway/src/utils/mod.rs index 66901ad00..59b5ed0ea 100644 --- a/crates/rattler_repodata_gateway/src/utils/mod.rs +++ b/crates/rattler_repodata_gateway/src/utils/mod.rs @@ -1,13 +1,16 @@ +use ::url::Url; +pub use body::BodyStreamExt; pub use encoding::{AsyncEncoding, Encoding}; pub use flock::LockedFile; use std::fmt::Write; -use url::Url; +use tokio::task::JoinError; mod encoding; #[cfg(test)] pub(crate) mod simple_channel_server; +mod body; mod flock; /// Convert a URL to a cache filename @@ -39,6 +42,27 @@ pub(crate) fn url_to_cache_filename(url: &Url) -> String { result } +/// A marker type that is used to signal that a task was cancelled. +pub(crate) struct Cancelled; + +/// Run a blocking task to completion. If the task is cancelled, the function +/// will return an error converted from `Error`. 
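A hedged usage sketch of the building block underneath such a wrapper: `tokio::task::spawn_blocking` moves CPU-bound work (like zstd or msgpack decoding) off the async threads, and the join result distinguishes panics and cancellation from ordinary errors. `expensive_decode` is a made-up stand-in:

```rust
#[tokio::main]
async fn main() {
    let result = tokio::task::spawn_blocking(|| {
        // Stand-in for real decompression or msgpack decoding work.
        Ok::<usize, std::io::Error>(expensive_decode(&[1, 2, 3]))
    })
    .await
    .expect("blocking task panicked or was cancelled");

    assert_eq!(result.unwrap(), 3);
}

fn expensive_decode(bytes: &[u8]) -> usize {
    bytes.len()
}
```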
+pub async fn run_blocking_task(f: F) -> Result +where + F: FnOnce() -> Result + Send + 'static, + T: Send + 'static, + E: From + Send + 'static, +{ + match tokio::task::spawn_blocking(f) + .await + .map_err(JoinError::try_into_panic) + { + Ok(result) => result, + Err(Err(_err)) => Err(E::from(Cancelled)), + Err(Ok(payload)) => std::panic::resume_unwind(payload), + } +} + #[cfg(test)] mod test { use super::url_to_cache_filename; diff --git a/crates/rattler_repodata_gateway/src/utils/simple_channel_server.rs b/crates/rattler_repodata_gateway/src/utils/simple_channel_server.rs index 8b4e51fe8..c98376fcb 100644 --- a/crates/rattler_repodata_gateway/src/utils/simple_channel_server.rs +++ b/crates/rattler_repodata_gateway/src/utils/simple_channel_server.rs @@ -1,7 +1,6 @@ use axum::routing::get_service; -use std::future::IntoFuture; -use std::net::SocketAddr; -use std::path::Path; +use rattler_conda_types::Channel; +use std::{future::IntoFuture, net::SocketAddr, path::Path}; use tokio::sync::oneshot; use tower_http::services::ServeDir; use url::Url; @@ -16,6 +15,11 @@ impl SimpleChannelServer { pub fn url(&self) -> Url { Url::parse(&format!("http://localhost:{}", self.local_addr.port())).unwrap() } + + #[allow(dead_code)] + pub fn channel(&self) -> Channel { + Channel::from_url(self.url()) + } } impl SimpleChannelServer { diff --git a/crates/rattler_solve/src/lib.rs b/crates/rattler_solve/src/lib.rs index 8f2160661..e656807b8 100644 --- a/crates/rattler_solve/src/lib.rs +++ b/crates/rattler_solve/src/lib.rs @@ -159,3 +159,15 @@ impl<'a, S: SolverRepoData<'a>> IntoRepoData<'a, S> for S { self } } + +/// A helper struct that implements `IntoRepoData` for anything that can +/// iterate over `RepoDataRecord`s. +pub struct RepoDataIter(pub T); + +impl<'a, T: IntoIterator, S: SolverRepoData<'a>> IntoRepoData<'a, S> + for RepoDataIter +{ + fn into(self) -> S { + self.0.into_iter().collect() + } +} diff --git a/py-rattler/Cargo.lock b/py-rattler/Cargo.lock index b76a585a3..97c9e24ef 100644 --- a/py-rattler/Cargo.lock +++ b/py-rattler/Cargo.lock @@ -505,6 +505,25 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.19" @@ -556,6 +575,19 @@ dependencies = [ "syn 2.0.59", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.3", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "deranged" version = "0.3.11" @@ -768,6 +800,15 @@ version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +[[package]] +name = "file_url" +version = "0.1.0" +dependencies = [ + "itertools", + "percent-encoding", + "url", +] + [[package]] name = "filetime" version = "0.2.23" @@ -983,6 +1024,7 @@ version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ + "serde", "typenum", "version_check", ] @@ -1073,6 +1115,25 @@ dependencies = [ "tracing", ] +[[package]] +name = "h2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.1.0", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1189,6 +1250,29 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-cache-semantics" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92baf25cf0b8c9246baecf3a444546360a97b569168fdf92563ee6a47829920c" +dependencies = [ + "http 1.1.0", + "http-serde", + "reqwest 0.12.3", + "serde", + "time", +] + +[[package]] +name = "http-serde" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1133cafcce27ea69d35e56b3a8772e265633e04de73c5f4e1afdffc1d19b5419" +dependencies = [ + "http 1.1.0", + "serde", +] + [[package]] name = "httparse" version = "1.8.0" @@ -1226,14 +1310,14 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.6", "tokio", "tower-service", "tracing", @@ -1249,6 +1333,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", + "h2 0.4.4", "http 1.1.0", "http-body 1.0.0", "httparse", @@ -1978,6 +2063,12 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + [[package]] name = "pem" version = "3.0.4" @@ -2407,7 +2498,7 @@ dependencies = [ [[package]] name = "rattler" -version = "0.23.1" +version = "0.24.0" dependencies = [ "anyhow", "bytes", @@ -2443,9 +2534,10 @@ dependencies = [ [[package]] name = "rattler_conda_types" -version = "0.22.0" +version = "0.22.1" dependencies = [ "chrono", + "file_url", "fxhash", "glob", "hex", @@ -2473,6 +2565,7 @@ version = "0.19.3" dependencies = [ "blake2", "digest", + "generic-array", "hex", "md-5", "serde", @@ -2483,7 +2576,7 @@ dependencies = [ [[package]] name = "rattler_index" -version = "0.19.8" +version = "0.19.10" dependencies = [ "fs-err", "rattler_conda_types", @@ -2496,20 +2589,22 @@ dependencies = [ [[package]] name = "rattler_lock" -version = "0.22.3" +version = "0.22.5" dependencies = [ "chrono", + "file_url", "fxhash", "indexmap 2.2.6", "itertools", "pep440_rs", "pep508_rs", - "percent-encoding", "purl", "rattler_conda_types", "rattler_digest", + "rayon", "serde", "serde_json", + "serde_repr", "serde_with", "serde_yaml", "thiserror", @@ -2526,7 +2621,7 @@ dependencies = [ [[package]] name = "rattler_networking" -version = "0.20.3" +version = "0.20.5" dependencies = [ "anyhow", "async-trait", @@ -2552,7 +2647,7 @@ dependencies = [ [[package]] name = "rattler_package_streaming" -version = "0.20.6" +version = "0.20.8" dependencies = [ "bzip2", "chrono", @@ -2576,28 +2671,40 @@ dependencies = [ [[package]] name = "rattler_repodata_gateway" -version = "0.19.9" +version = "0.19.11" dependencies = [ "anyhow", "async-compression", + "async-trait", "blake2", + "bytes", 
"cache_control", "chrono", + "dashmap", + "dirs", + "file_url", "futures", "hex", + "http 1.1.0", + "http-cache-semantics", "humansize", "humantime", "itertools", "json-patch", "libc", + "md-5", "memmap2", "ouroboros", + "parking_lot", + "percent-encoding", "pin-project-lite", "rattler_conda_types", "rattler_digest", "rattler_networking", + "rayon", "reqwest 0.12.3", "reqwest-middleware", + "rmp-serde", "serde", "serde_json", "serde_with", @@ -2605,15 +2712,17 @@ dependencies = [ "tempfile", "thiserror", "tokio", + "tokio-rayon", "tokio-util", "tracing", "url", "windows-sys 0.52.0", + "zstd", ] [[package]] name = "rattler_shell" -version = "0.20.1" +version = "0.20.2" dependencies = [ "enum_dispatch", "indexmap 2.2.6", @@ -2628,7 +2737,7 @@ dependencies = [ [[package]] name = "rattler_solve" -version = "0.21.0" +version = "0.21.1" dependencies = [ "chrono", "futures", @@ -2644,7 +2753,7 @@ dependencies = [ [[package]] name = "rattler_virtual_packages" -version = "0.19.8" +version = "0.19.9" dependencies = [ "archspec", "libloading", @@ -2658,6 +2767,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -2729,7 +2858,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.28", @@ -2773,6 +2902,7 @@ dependencies = [ "bytes", "futures-core", "futures-util", + "h2 0.4.4", "http 1.1.0", "http-body 1.0.0", "http-body-util", @@ -2865,6 +2995,28 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rmp" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + [[package]] name = "rustc-demangle" version = "0.1.23" @@ -3527,6 +3679,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rayon" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cf33a76e0b1dd03b778f83244137bd59887abf25c0e87bc3e7071105f457693" +dependencies = [ + "rayon", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.24.1" diff --git a/py-rattler/Cargo.toml b/py-rattler/Cargo.toml index 42f11845d..1f9854b55 100644 --- a/py-rattler/Cargo.toml +++ b/py-rattler/Cargo.toml @@ -22,6 +22,7 @@ futures = "0.3.30" rattler = { path = "../crates/rattler", default-features = false } rattler_repodata_gateway = { path = "../crates/rattler_repodata_gateway", default-features = false, features = [ "sparse", + "gateway", ] } rattler_conda_types = { path = "../crates/rattler_conda_types", default-features = false } rattler_digest = { path = "../crates/rattler_digest" } @@ -39,6 +40,7 @@ pyo3 = { version = "0.20", features = [ 
"abi3-py38", "extension-module", "multiple-pymethods", + ] } pyo3-asyncio = { version = "0.20", features = ["tokio-runtime"] } tokio = { version = "1.37" } diff --git a/py-rattler/docs/gateway.md b/py-rattler/docs/gateway.md new file mode 100644 index 000000000..2af00aa81 --- /dev/null +++ b/py-rattler/docs/gateway.md @@ -0,0 +1,3 @@ +# Gateway + +::: rattler.repo_data.gateway diff --git a/py-rattler/docs/stylesheets/extra.css b/py-rattler/docs/stylesheets/extra.css index 6678fd5ad..66be165cc 100644 --- a/py-rattler/docs/stylesheets/extra.css +++ b/py-rattler/docs/stylesheets/extra.css @@ -2,18 +2,60 @@ font-family: 'Dosis', sans-serif; } +[data-md-color-primary=prefix] { + --md-primary-fg-color: #F9C405; + --md-primary-fg-color--light: #ffee57; + --md-primary-fg-color--dark: #F9C405; + --md-primary-bg-color: #000000de; + --md-primary-bg-color--light: #0000008a +} + +[data-md-color-accent=prefix] { + --md-accent-fg-color: #fa0; + --md-accent2-fg-color: #eab308; + --md-accent-fg-color--transparent: #ffaa001a; + --md-accent-bg-color: #000000de; + --md-accent-bg-color--light: #0000008a +} + + +[data-md-color-primary=prefix-light] { + --md-primary-fg-color: #000000de; + --md-primary-fg-color--light: #ffee57; + --md-primary-fg-color--dark: #F9C405; + --md-primary-bg-color: #F9C405; + --md-primary-bg-color--light: #F9C405; + --md-code-bg-color: rgba(0, 0, 0, 0.04); +} + +[data-md-color-accent=prefix-light] { + --md-accent-fg-color: #2e2400; + --md-accent2-fg-color: #19116f; + --md-accent-fg-color--transparent: #ffaa001a; + --md-accent-bg-color: #000000de; + --md-accent-bg-color--light: #0000008a +} + +.md-typeset a { + color: var(--md-accent2-fg-color); +} + +.md-nav__item .md-nav__link--active, .md-nav__item .md-nav__link--active code { + color: var(--md-accent-fg-color); + font-weight: bold; +} .md-header__topic:first-child { font-weight: normal; } .md-typeset h1 { - color: #ffec3d; + color: var(--md-accent-fg-color); } .md-typeset h1, .md-typeset h2, .md-typeset h3, .md-typeset h4, .md-typeset h5, .md-typeset h6 { font-family: 'Dosis', sans-serif; font-weight: 500; - color: #ffec3d; + color: var(--md-accent-fg-color); } .md-typeset p { @@ -22,7 +64,7 @@ } :root > * { - --md-code-hl-string-color: #ffec3d; + --md-code-hl-string-color: var(--md-accent-fg-color); } .md-header__button.md-logo { diff --git a/py-rattler/mkdocs.yml b/py-rattler/mkdocs.yml index f480a9889..719c5fbff 100644 --- a/py-rattler/mkdocs.yml +++ b/py-rattler/mkdocs.yml @@ -6,28 +6,84 @@ theme: primary: yellow accent: yellow scheme: slate - + site_url: https://prefix.dev font: text: Red Hat Text code: JetBrains Mono + palette: + # Palette toggle for automatic mode + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + + # Palette toggle for light mode + - media: "(prefers-color-scheme: light)" + scheme: default + primary: prefix-light + accent: prefix-light + toggle: + icon: material/brightness-7 + name: Switch to dark mode + + # Palette toggle for dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: prefix + accent: prefix + toggle: + icon: material/brightness-4 + name: Switch to system preference + icon: + edit: material/pencil + view: material/eye + + features: + - content.tabs.link + - content.code.copy + - content.action.edit + - content.code.annotate + # - content.code.select Sponsor only + - navigation.instant + - navigation.instant.progress + - navigation.tracking + - navigation.sections + - navigation.top + - navigation.footer extra_css: 
- stylesheets/extra.css repo_url: https://github.com/mamba-org/rattler/ +edit_uri: edit/main/py-rattler/docs/ markdown_extensions: + - admonition + - def_list + - footnotes + - admonition + - def_list + - footnotes + - pymdownx.tasklist: + custom_checkbox: true - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.inlinehilite - pymdownx.snippets - - pymdownx.superfences - - admonition - - def_list - - footnotes + - pymdownx.details + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + - pymdownx.tabbed: + alternate_style: true + - toc: + toc_depth: 3 + permalink: "#" + - mdx_truly_sane_lists nav: - First Steps: index.md @@ -69,6 +125,7 @@ nav: - PrefixPaths: prefix_paths.md - PrefixRecord: prefix_record.md - repo_data: + - Gateway: gateway.md - PackageRecord: package_record.md - PatchInstructions: patch_instructions.md - RepoDataRecord: repo_data_record.md diff --git a/py-rattler/pixi.lock b/py-rattler/pixi.lock index 98f80edc3..f42976b16 100644 --- a/py-rattler/pixi.lock +++ b/py-rattler/pixi.lock @@ -7,13 +7,18 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha885e6a_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h9eb54c0_6.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h55db66e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-13.2.0-hceb6213_106.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-hc881cc4_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-hc881cc4_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.2.0-h6ddb7a1_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.3-h2797004_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h95c4c6d_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda @@ -27,7 +32,10 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.77.2-h70c747d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.77.2-h2c6d0dc_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda @@ -45,6 +53,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.8.19-h5ba8234_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.77.2-h7e1429e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.77.2-h38e4360_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -63,6 +73,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.8.19-h2469fbe_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.77.2-h4ff7c5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.77.2-hf6ec828_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -85,6 +97,8 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.8.19-h4de0772_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-4_cp38.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.77.2-hf8d6059_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.77.2-h17fc481_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 @@ -107,14 +121,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py38h17151c0_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairocffi-1.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairosvg-2.7.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py38h6d47a40_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py312hf06ca03_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 @@ -157,11 +171,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py38h01eb140_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdx_truly_sane_lists-1.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mergedeep-1.3.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-1.5.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-autorefs-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.20-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-extensions-1.3.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-0.24.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-python-1.9.2-pyhd8ed1ab_0.conda @@ -172,7 +187,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/paginate-0.5.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.43-hcad00b1_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.3.0-py38h9e66945_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.3.0-py312hdcec9eb_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 @@ -180,14 +195,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pymdown-extensions-10.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.3-hab00c5b_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-4_cp38.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-4_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py38h01eb140_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py312h98912ed_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyyaml-env-tag-0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/regex-2024.4.16-py38h01eb140_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/regex-2024.4.28-py312h9a8786e_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 @@ -195,8 +210,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.11.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.0-py38h578d9bd_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.0-py312h7900ff3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda @@ -217,14 +233,14 @@ environments: osx-64: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py38h940360d_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312heafc425_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.2.2-h8857fd0_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/cairo-1.18.0-h99e66fa_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairocffi-1.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairosvg-2.7.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.16.0-py38h082e395_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.16.0-py312h38bf5a0_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 @@ -262,11 +278,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.15-hb7f2c08_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.2.13-h8a1eda9_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py38hae2e43d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312h41838bb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdx_truly_sane_lists-1.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mergedeep-1.3.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-1.5.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-autorefs-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.20-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-extensions-1.3.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-0.24.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-python-1.9.2-pyhd8ed1ab_0.conda @@ -277,7 +294,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/paginate-0.5.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pcre2-10.43-h0ad2156_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.3.0-py38h85abd47_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.3.0-py312h0c923fa_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pixman-0.43.4-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2 @@ -285,14 +302,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pymdown-extensions-10.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.8.19-h5ba8234_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.3-h1411813_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.8-4_cp38.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.1-py38hcafd530_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.1-py312h104f124_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyyaml-env-tag-0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/regex-2024.4.16-py38hae2e43d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/regex-2024.4.28-py312h5fa3f64_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 @@ -300,8 +317,9 @@ environments: - conda: 
https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.11.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.0-py38h2bea1e5_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.0-py312hc2c2f20_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.11-h0dc2134_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2 @@ -313,14 +331,14 @@ environments: osx-arm64: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py38he333c0f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312h9f69965_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.2.2-hf0a4a13_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.0-hd1e100b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairocffi-1.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairosvg-2.7.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.16.0-py38h73f40f7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.16.0-py312h8e38eb3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 @@ -358,11 +376,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.15-hf346824_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.2.13-h53f4e23_5.conda - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-2.1.5-py38h336bac9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-2.1.5-py312he37b823_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdx_truly_sane_lists-1.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mergedeep-1.3.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-1.5.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-autorefs-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.20-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-extensions-1.3.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-0.24.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-python-1.9.2-pyhd8ed1ab_0.conda @@ -373,7 +392,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/paginate-0.5.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.43-h26f9a81_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.3.0-py38h9ef4633_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.3.0-py312h8a801b1_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.43.4-hebf3989_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-h27ca646_1001.tar.bz2 @@ -381,14 +400,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pymdown-extensions-10.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.8.19-h2469fbe_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.3-h4a7b5fc_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.8-4_cp38.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.1-py38hb192615_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.1-py312h02f2b3b_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyyaml-env-tag-0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/regex-2024.4.16-py38h336bac9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/regex-2024.4.28-py312h4a164c9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 @@ -396,8 +415,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.11.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchdog-4.0.0-py38h336bac9_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchdog-4.0.0-py312he37b823_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.11-hb547adb_0.conda - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.3-h27ca646_0.tar.bz2 @@ -409,14 +429,14 @@ environments: win-64: - conda: https://conda.anaconda.org/conda-forge/noarch/astunparse-1.6.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py38hd3f51b4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h53d5487_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-hcfcfb64_5.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.2.2-h56e8100_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/cairo-1.18.0-h1fef639_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairocffi-1.6.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/cairosvg-2.7.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.2.2-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.16.0-py38h91455d4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/cffi-1.16.0-py312he70551f_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-win_pyh7428d3b_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 @@ -458,11 +478,12 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py38h91455d4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312he70551f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdx_truly_sane_lists-1.3-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mergedeep-1.3.4-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-1.5.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-autorefs-1.0.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.20-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-extensions-1.3.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-0.24.3-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mkdocstrings-python-1.9.2-pyhd8ed1ab_0.conda @@ -473,7 +494,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/paginate-0.5.6-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pcre2-10.43-h17e33f8_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.3.0-py38h894f861_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pillow-10.3.0-py312h6f6a607_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/pixman-0.43.4-h63175ca_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.2.1-pyhd8ed1ab_0.conda - 
conda: https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2 @@ -481,13 +502,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pymdown-extensions-10.8-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.8.19-h4de0772_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-4_cp38.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-4_cp312.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.1-py38h91455d4_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.1-py312he70551f_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pyyaml-env-tag-0.1-pyhd8ed1ab_0.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/win-64/regex-2024.4.16-py38h91455d4_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/regex-2024.4.28-py312h4389bb4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 @@ -495,12 +516,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.11.0-hd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-hcf57466_18.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.38.33130-h82b7239_18.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.38.33130-hcb4865c_18.conda - - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.0-py38haa244fe_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.0-py312h2e8e312_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda - conda: https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.11-hcd874cb_0.conda @@ -517,26 +539,34 @@ environments: linux-64: - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha885e6a_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.2.2-hbcca054_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h9eb54c0_6.conda - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h55db66e_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-13.2.0-hceb6213_106.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-hc881cc4_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-hc881cc4_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.2.0-h6ddb7a1_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.45.3-h2797004_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h95c4c6d_6.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/maturin-1.2.3-py38hcdda232_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/mypy-1.5.1-py38h01eb140_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4.20240210-h59595ed_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.2.1-hd590300_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/patchelf-0.17.2-h58526e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.8-py38h01eb140_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.4-pyhd8ed1ab_0.conda @@ -546,9 +576,14 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.3.7-py38h18b4745_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rust-1.77.2-h70c747d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.77.2-h2c6d0dc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_17.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 osx-64: - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h10d778d_5.conda @@ -560,11 +595,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.45.3-h92b6c6a_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.2.13-h8a1eda9_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/maturin-1.2.3-py38h196e9ca_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.5.1-py38hcafd530_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.4.20240210-h73e2aa4_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.2.1-hd75f5a5_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/psutil-5.9.8-py38hae2e43d_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.4-pyhd8ed1ab_0.conda @@ -574,9 +611,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.3.7-py38h1916ca8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/rust-1.77.2-h7e1429e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.77.2-h38e4360_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.6-h775f41a_0.tar.bz2 osx-arm64: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h93a5062_5.conda @@ -588,11 +629,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.4.2-h3422bc3_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.45.3-h091b4b1_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.2.13-h53f4e23_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/maturin-1.2.3-py38h92a0862_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/mypy-1.5.1-py38hb192615_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.4.20240210-h078ce10_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.2.1-h0d3ecfb_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda - conda: 
https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-5.9.8-py38h336bac9_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.4-pyhd8ed1ab_0.conda @@ -602,9 +645,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.3.7-py38h5477e86_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.77.2-h4ff7c5d_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.77.2-hf6ec828_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 win-64: - conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-hcfcfb64_5.conda @@ -615,10 +662,18 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.45.3-hcfcfb64_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/libzlib-1.2.13-hcfcfb64_5.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/win-64/maturin-1.2.3-py38hf90c7e5_1.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/win-64/mypy-1.5.1-py38h91455d4_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/openssl-3.2.1-hcfcfb64_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-24.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.8-py38h91455d4_0.conda - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.4-pyhd8ed1ab_0.conda @@ -627,6 +682,9 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/python-3.8.19-h4de0772_0_cpython.conda - conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-4_cp38.conda - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.3.7-py38h5e48be7_0.conda + - conda: https://conda.anaconda.org/conda-forge/win-64/rust-1.77.2-hf8d6059_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.77.2-h17fc481_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-69.5.1-pyhd8ed1ab_0.conda - conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.11.0-pyha770c72_0.conda @@ -634,6 +692,7 @@ environments: - conda: https://conda.anaconda.org/conda-forge/win-64/vc-14.3-hcf57466_18.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vc14_runtime-14.38.33130-h82b7239_18.conda - conda: https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.38.33130-hcb4865c_18.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda - conda: https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2 packages: - kind: conda @@ -697,86 +756,101 @@ packages: license_family: BSD size: 7609750 timestamp: 1702422720584 +- kind: conda + name: binutils_impl_linux-64 + version: '2.40' + build: ha885e6a_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha885e6a_0.conda + sha256: 180b268f207d1481beb9de5c173751d14c429a7226fa9a85941e4a54cf6be1b4 + md5: 800a4c872b5bc06fa83888d112fe6c4f + depends: + - ld_impl_linux-64 2.40 h55db66e_0 + - sysroot_linux-64 + license: GPL-3.0-only + license_family: GPL + size: 5797310 + timestamp: 1713651250214 - kind: conda name: brotli-python version: 1.1.0 - build: py38h17151c0_1 + build: py312h30efb56_1 build_number: 1 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py38h17151c0_1.conda - sha256: f932ae77f10885dd991b0e1f56f6effea9f19b169e8606dab0bdafd0e44db3c9 - md5: 7a5a699c8992fc51ef25e980f4502c2a + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda + sha256: b68706698b6ac0d31196a8bcb061f0d1f35264bcd967ea45e03e108149a74c6f + md5: 45801a89533d3336a365284d93298e36 depends: - libgcc-ng >=12 - libstdcxx-ng >=12 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 constrains: - libbrotlicommon 1.1.0 hd590300_1 license: MIT license_family: MIT - size: 350830 - timestamp: 1695990250755 + size: 350604 + timestamp: 1695990206327 - kind: conda name: brotli-python version: 1.1.0 - build: py38h940360d_1 + build: py312h53d5487_1 build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py38h940360d_1.conda - sha256: 0a088bff62ddd2e505bdc80cc16da009c134b9ccfa6352b0cfe9d4eeed27d8c2 - md5: ad8d4ae4e8351a2fc0fe92f13bd266d8 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py312h53d5487_1.conda + sha256: 769e276ecdebf86f097786cbde1ebd11e018cd6cd838800995954fe6360e0797 + md5: d01a6667b99f0e8ad4097af66c938e62 depends: - - libcxx >=15.0.7 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - - libbrotlicommon 1.1.0 h0dc2134_1 + - libbrotlicommon 1.1.0 hcfcfb64_1 license: MIT license_family: MIT - size: 366343 - timestamp: 1695990788245 + size: 322514 + timestamp: 1695991054894 - kind: conda name: brotli-python version: 1.1.0 - build: py38hd3f51b4_1 + build: py312h9f69965_1 build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py38hd3f51b4_1.conda - sha256: 
a292d6b3118ef284cc03a99a6efe5e08ca3a6d0e37eff78eb8d87cfca3830d7b - md5: 72708ea626a2530148ea49eb743576f4 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py312h9f69965_1.conda + sha256: 3418b1738243abba99e931c017b952771eeaa1f353c07f7d45b55e83bb74fcb3 + md5: 1bc01b9ffdf42beb1a9fe4e9222e0567 depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - libcxx >=15.0.7 + - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12.0rc3,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 constrains: - - libbrotlicommon 1.1.0 hcfcfb64_1 + - libbrotlicommon 1.1.0 hb547adb_1 license: MIT license_family: MIT - size: 321650 - timestamp: 1695990817828 + size: 343435 + timestamp: 1695990731924 - kind: conda name: brotli-python version: 1.1.0 - build: py38he333c0f_1 + build: py312heafc425_1 build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py38he333c0f_1.conda - sha256: 3fd1e0a4b7ea1b20f69bbc2d74c798f3eebd775ccbcdee170f68b1871f8bbb74 - md5: 29160c74d5977b1c5ecd654b00d576f0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py312heafc425_1.conda + sha256: fc55988f9bc05a938ea4b8c20d6545bed6e9c6c10aa5147695f981136ca894c1 + md5: a288b88f06b8bfe0dedaf5c4b6ac6b7a depends: - libcxx >=15.0.7 - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 constrains: - - libbrotlicommon 1.1.0 hb547adb_1 + - libbrotlicommon 1.1.0 h0dc2134_1 license: MIT license_family: MIT - size: 343036 - timestamp: 1695990970956 + size: 366883 + timestamp: 1695990710194 - kind: conda name: bzip2 version: 1.0.8 @@ -1032,75 +1106,75 @@ packages: - kind: conda name: cffi version: 1.16.0 - build: py38h082e395_0 + build: py312h38bf5a0_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.16.0-py38h082e395_0.conda - sha256: c79e5074c663670f75258f6fce8ebd0e65042bd22ecbb4979294c57ff4fa8fc5 - md5: 046fe2a8edb11f1b8a7d3bd8e2fd1de7 + url: https://conda.anaconda.org/conda-forge/osx-64/cffi-1.16.0-py312h38bf5a0_0.conda + sha256: 8b856583b56fc30f064a7cb286f85e4b5725f2bd4fda8ba0c4e94bffe258741e + md5: a45759c013ab20b9017ef9539d234dd7 depends: - libffi >=3.4,<4.0a0 - pycparser - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 license: MIT license_family: MIT - size: 228367 - timestamp: 1696002058694 + size: 282370 + timestamp: 1696002004433 - kind: conda name: cffi version: 1.16.0 - build: py38h6d47a40_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py38h6d47a40_0.conda - sha256: ec0a62d4836d3ec2321d07cffa5aeef37c6818c6cce6383dc6be7205d09551b3 - md5: fc010dfb8ce6540d289436fbba499ee7 + build: py312h8e38eb3_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.16.0-py312h8e38eb3_0.conda + sha256: 1544403cb1a5ca2aeabf0dac86d9ce6066d6fb4363493643b33ffd1b78038d18 + md5: 960ecbd65860d3b1de5e30373e1bffb1 depends: - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - pycparser - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12.0rc3,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 license: MIT license_family: MIT - size: 239127 - timestamp: 1696001978654 + size: 284245 + timestamp: 1696002181644 - kind: conda name: cffi version: 1.16.0 - build: 
py38h73f40f7_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-1.16.0-py38h73f40f7_0.conda - sha256: 375e0be4068f4b00facfa569aa26c92ed87858f45be875f2c4bf90f33733f4de - md5: 02911ce6163d7a3e8fe9d9398fb9986d + build: py312he70551f_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/cffi-1.16.0-py312he70551f_0.conda + sha256: dd39e594f5c6bca52dfed343de2af9326a99700ce2ba3404bd89706926fc0137 + md5: 5a51096925d52332c62bfd8904899055 depends: - - libffi >=3.4,<4.0a0 - pycparser - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: MIT license_family: MIT - size: 230759 - timestamp: 1696002169830 + size: 287805 + timestamp: 1696002408940 - kind: conda name: cffi version: 1.16.0 - build: py38h91455d4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/cffi-1.16.0-py38h91455d4_0.conda - sha256: 0704377274cfe0b3a5c308facecdeaaf2207303ee847842a4bbd3f70b7331ddc - md5: e9b2ac14b9c3d3eaeb2f69745e021e49 + build: py312hf06ca03_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py312hf06ca03_0.conda + sha256: 5a36e2c254603c367d26378fa3a205bd92263e30acf195f488749562b4c44251 + md5: 56b0ca764ce23cc54f3f7e2a7b970f6d depends: + - libffi >=3.4,<4.0a0 + - libgcc-ng >=12 - pycparser - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 license: MIT license_family: MIT - size: 234905 - timestamp: 1696002150251 + size: 294523 + timestamp: 1696001868949 - kind: conda name: charset-normalizer version: 3.3.2 @@ -1488,6 +1562,27 @@ packages: license: GPL-2.0-only OR FTL size: 510306 timestamp: 1694616398888 +- kind: conda + name: gcc_impl_linux-64 + version: 13.2.0 + build: h9eb54c0_6 + build_number: 6 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h9eb54c0_6.conda + sha256: 67d16151d316f04ea2779ff3a4f5fcf4a5454e89bc21dabc1a4f7c08cf5ea821 + md5: 36ca2a36806ab26c2daf20d5b62280d7 + depends: + - binutils_impl_linux-64 >=2.40 + - libgcc-devel_linux-64 13.2.0 hceb6213_106 + - libgcc-ng >=13.2.0 + - libgomp >=13.2.0 + - libsanitizer 13.2.0 h6ddb7a1_6 + - libstdcxx-ng >=13.2.0 + - sysroot_linux-64 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 53360656 + timestamp: 1714581875812 - kind: conda name: ghp-import version: 2.1.0 @@ -1637,6 +1732,22 @@ packages: license_family: BSD size: 111589 timestamp: 1704967140287 +- kind: conda + name: kernel-headers_linux-64 + version: 2.6.32 + build: he073ed8_17 + build_number: 17 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_17.conda + sha256: fb39d64b48f3d9d1acc3df208911a41f25b6a00bd54935d5973b4739a9edd5b6 + md5: d731b543793afc0433c4fd593e693fce + constrains: + - sysroot_linux-64 ==2.12 + license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 + license_family: GPL + size: 710627 + timestamp: 1708000830116 - kind: conda name: lcms2 version: '2.16' @@ -1966,6 +2077,20 @@ packages: license_family: MIT size: 42063 timestamp: 1636489106777 +- kind: conda + name: libgcc-devel_linux-64 + version: 13.2.0 + build: hceb6213_106 + build_number: 106 + subdir: noarch + noarch: 
generic + url: https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-13.2.0-hceb6213_106.conda + sha256: f5af7a346ba0a2c322028a7fa8ba99f5094911439d5aab2c6bc42a4e9022bc68 + md5: b85d6b583f498b4ddc9150aefb492f7f + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 2575829 + timestamp: 1714581666472 - kind: conda name: libgcc-ng version: 13.2.0 @@ -2310,6 +2435,21 @@ packages: license: zlib-acknowledgement size: 268524 timestamp: 1708780496420 +- kind: conda + name: libsanitizer + version: 13.2.0 + build: h6ddb7a1_6 + build_number: 6 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.2.0-h6ddb7a1_6.conda + sha256: 06f3695963ee86badbfe006f13fa9fe600539acb77f19c5c972d498a14e9b53d + md5: 95b48df99634d9e706a0bf7e30ae91c8 + depends: + - libgcc-ng >=13.2.0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + size: 4188343 + timestamp: 1714581787957 - kind: conda name: libsqlite version: 3.45.3 @@ -2783,76 +2923,76 @@ packages: - kind: conda name: markupsafe version: 2.1.5 - build: py38h01eb140_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py38h01eb140_0.conda - sha256: 384a193d11c89463533e6fc5d94a6c67c16c598b32747a8f86f9ad227f0aed17 - md5: aeeb09febb02542e020c3ba7084ead01 + build: py312h41838bb_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py312h41838bb_0.conda + sha256: 8dc8f31f78d00713300da000b6ebaa1943a17c112f267de310d5c3d82950079c + md5: c4a9c25c09cef3901789ca818d9beb10 depends: - - libgcc-ng >=12 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD - size: 24274 - timestamp: 1706900087252 + size: 25742 + timestamp: 1706900456837 - kind: conda name: markupsafe version: 2.1.5 - build: py38h336bac9_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-2.1.5-py38h336bac9_0.conda - sha256: f1b1b405c5246c499d66658e754e920529866826b247111cd481e15d0571f702 - md5: 76e1802508a91e5970f42f6558f5064e + build: py312h98912ed_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py312h98912ed_0.conda + sha256: 273d8efd6c089c534ccbede566394c0ac1e265bfe5d89fe76e80332f3d75a636 + md5: 6ff0b9582da2d4a74a1f9ae1f9ce2af6 depends: - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - libgcc-ng >=12 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD - size: 23719 - timestamp: 1706900313162 + size: 26685 + timestamp: 1706900070330 - kind: conda name: markupsafe version: 2.1.5 - build: py38h91455d4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py38h91455d4_0.conda - sha256: a0753407d33dbeebf3ee3118cc4bd3559af81e3de497b15f01a52b2702314c73 - md5: 0b3eb104f5c37ba2e7ec675b6a8ea453 + build: py312he37b823_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-2.1.5-py312he37b823_0.conda + sha256: 61480b725490f68856dd14e646f51ffc34f77f2c985bd33e3b77c04b2856d97d + md5: ba3a8f8cf8bbdb81394275b1e1d271da depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - python >=3.12,<3.13.0a0 + - python >=3.12,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 constrains: - jinja2 
>=3.0.0 license: BSD-3-Clause license_family: BSD - size: 26598 - timestamp: 1706900643364 + size: 26382 + timestamp: 1706900495057 - kind: conda name: markupsafe version: 2.1.5 - build: py38hae2e43d_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.5-py38hae2e43d_0.conda - sha256: ef6eaa455d99e40df64131d23f4b52bc3601f95a48f255cb9917f2d4eb760a36 - md5: 5107dae4aa6cbcb0cb73718cdd951c29 + build: py312he70551f_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.5-py312he70551f_0.conda + sha256: f8690a3c87e2e96cebd434a829bb95cac43afe6c439530b336dc3452fe4ce4af + md5: 4950a739b19edaac1ed29ca9474e49ac depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 constrains: - jinja2 >=3.0.0 license: BSD-3-Clause license_family: BSD - size: 23167 - timestamp: 1706900242727 + size: 29060 + timestamp: 1706900374745 - kind: conda name: maturin version: 1.2.3 @@ -2928,6 +3068,22 @@ packages: license_family: MIT size: 4675089 timestamp: 1695301899264 +- kind: conda + name: mdx_truly_sane_lists + version: '1.3' + build: pyhd8ed1ab_0 + subdir: noarch + noarch: python + url: https://conda.anaconda.org/conda-forge/noarch/mdx_truly_sane_lists-1.3-pyhd8ed1ab_0.tar.bz2 + sha256: 2a00cd521d63ae8a20b52de590ff2f1f63ea4ba569f7e66ae629330f0e69cf43 + md5: 3c4c4f9b8ae968cb20823351d81d12b5 + depends: + - markdown >=2.6 + - python >=3.6 + license: MIT + license_family: MIT + size: 10480 + timestamp: 1658251565870 - kind: conda name: mergedeep version: 1.3.4 @@ -2995,13 +3151,13 @@ packages: timestamp: 1709500020733 - kind: conda name: mkdocs-material - version: 9.5.18 + version: 9.5.20 build: pyhd8ed1ab_0 subdir: noarch noarch: python - url: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.18-pyhd8ed1ab_0.conda - sha256: 47f3d939ade648d8288705f1dd95c9a1f80b10772b1fdbd8a81a6fe92689c395 - md5: d5d4bb5f8a501e9ce4bc73a5baa3553b + url: https://conda.anaconda.org/conda-forge/noarch/mkdocs-material-9.5.20-pyhd8ed1ab_0.conda + sha256: 38f61b17fa334d20a60c5a37eefa836e05c4e4b0a3cff763591c941be90de348 + md5: 5f09758905bfaf7d5c748196f63aba35 depends: - babel ~=2.10 - colorama ~=0.4 @@ -3017,8 +3173,8 @@ packages: - requests ~=2.26 license: MIT license_family: MIT - size: 5002352 - timestamp: 1713278417020 + size: 5007228 + timestamp: 1714393800216 - kind: conda name: mkdocs-material-extensions version: 1.3.1 @@ -3494,11 +3650,11 @@ packages: - kind: conda name: pillow version: 10.3.0 - build: py38h85abd47_0 + build: py312h0c923fa_0 subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.3.0-py38h85abd47_0.conda - sha256: b3f85b5c20ab9d2614e81816adb0a88769ae5b3392e02b802d470b3e4805e0bf - md5: 785f9ef8a330dcf8d6619c127b13f21d + url: https://conda.anaconda.org/conda-forge/osx-64/pillow-10.3.0-py312h0c923fa_0.conda + sha256: 3e33ce8ba364948eeeeb06da435059b1ed0e6cfb2b1195931b76e190ee671310 + md5: 6f0591ae972e9b815739da3392fbb3c3 depends: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 @@ -3508,20 +3664,20 @@ packages: - libxcb >=1.15,<1.16.0a0 - libzlib >=1.2.13,<1.3.0a0 - openjpeg >=2.5.2,<3.0a0 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 license: HPND - size: 41189766 - timestamp: 1712154782357 + size: 42531277 + timestamp: 1712154782302 - kind: conda name: pillow version: 10.3.0 - build: py38h894f861_0 
+ build: py312h6f6a607_0 subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pillow-10.3.0-py38h894f861_0.conda - sha256: d661083709fd0e2262388adeae52a0c8591c762a54582a6fa0941158990ecd68 - md5: 9b2eb85eed298007db9c714981ab87fe + url: https://conda.anaconda.org/conda-forge/win-64/pillow-10.3.0-py312h6f6a607_0.conda + sha256: f1621c28346609886ccce14b6ae0069b5cb34925ace73e05a8c06770d2ad7a19 + md5: 8d5f5f1fa36200f1ef987299a47de403 depends: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 @@ -3531,63 +3687,63 @@ packages: - libxcb >=1.15,<1.16.0a0 - libzlib >=1.2.13,<1.3.0a0 - openjpeg >=2.5.2,<3.0a0 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 - ucrt >=10.0.20348.0 - vc >=14.2,<15 - vc14_runtime >=14.29.30139 license: HPND - size: 40924506 - timestamp: 1712155038917 + size: 42439434 + timestamp: 1712155248737 - kind: conda name: pillow version: 10.3.0 - build: py38h9e66945_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.3.0-py38h9e66945_0.conda - sha256: b44195d022fa378808397b29406447f0a9e0e4486d03a8a97f014e8f78b091a5 - md5: 06a7c758cf349a5bf24989f179bb504e + build: py312h8a801b1_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.3.0-py312h8a801b1_0.conda + sha256: 26bc04e81ae5fce70e4b72478dadea29d32b693eed17640be7721108a3c9af0d + md5: 1d42544faaed27dce36268912b8dfedf depends: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 - - libgcc-ng >=12 - libjpeg-turbo >=3.0.0,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.3.2,<2.0a0 - libxcb >=1.15,<1.16.0a0 - libzlib >=1.2.13,<1.3.0a0 - openjpeg >=2.5.2,<3.0a0 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python >=3.12,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 license: HPND - size: 42314111 - timestamp: 1712154579877 + size: 42729895 + timestamp: 1712155044162 - kind: conda name: pillow version: 10.3.0 - build: py38h9ef4633_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.3.0-py38h9ef4633_0.conda - sha256: 1d32b60ffb4b851e839e7d0ee5930b9d2a4c6158ee149b400ab9cf55f41c7a3c - md5: c43a7c5715ba3697fb8e9fc5da1a2d96 + build: py312hdcec9eb_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.3.0-py312hdcec9eb_0.conda + sha256: a7fdcc1e56b66d95622bad073cc8d347cc180988040419754abb2a4ed7b29471 + md5: 425bb325f970e57a047ac57c4586489d depends: - freetype >=2.12.1,<3.0a0 - lcms2 >=2.16,<3.0a0 + - libgcc-ng >=12 - libjpeg-turbo >=3.0.0,<4.0a0 - libtiff >=4.6.0,<4.7.0a0 - libwebp-base >=1.3.2,<2.0a0 - libxcb >=1.15,<1.16.0a0 - libzlib >=1.2.13,<1.3.0a0 - openjpeg >=2.5.2,<3.0a0 - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - tk >=8.6.13,<8.7.0a0 license: HPND - size: 41460984 - timestamp: 1712155043935 + size: 41991755 + timestamp: 1712154634705 - kind: conda name: pip version: 23.2.1 @@ -4051,6 +4207,114 @@ packages: license: Python-2.0 size: 22357104 timestamp: 1710939954552 +- kind: conda + name: python + version: 3.12.3 + build: h1411813_0_cpython + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/python-3.12.3-h1411813_0_cpython.conda + sha256: 3b327ffc152a245011011d1d730781577a8274fde1cf6243f073749ead8f1c2a + md5: df1448ec6cbf8eceb03d29003cf72ae6 + depends: + - __osx >=10.9 + - bzip2 >=1.0.8,<2.0a0 + - libexpat 
>=2.6.2,<3.0a0 + - libffi >=3.4,<4.0a0 + - libsqlite >=3.45.2,<4.0a0 + - libzlib >=1.2.13,<1.3.0a0 + - ncurses >=6.4.20240210,<7.0a0 + - openssl >=3.2.1,<4.0a0 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - xz >=5.2.6,<6.0a0 + constrains: + - python_abi 3.12.* *_cp312 + license: Python-2.0 + size: 14557341 + timestamp: 1713208068012 +- kind: conda + name: python + version: 3.12.3 + build: h2628c8c_0_cpython + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/python-3.12.3-h2628c8c_0_cpython.conda + sha256: 1a95494abe572a8819c933f978df89f00bde72ea9432d46a70632599e8029ea4 + md5: f07c8c5dd98767f9a652de5d039b284e + depends: + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.2,<3.0a0 + - libffi >=3.4,<4.0a0 + - libsqlite >=3.45.2,<4.0a0 + - libzlib >=1.2.13,<1.3.0a0 + - openssl >=3.2.1,<4.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 + - xz >=5.2.6,<6.0a0 + constrains: + - python_abi 3.12.* *_cp312 + license: Python-2.0 + size: 16179248 + timestamp: 1713205644673 +- kind: conda + name: python + version: 3.12.3 + build: h4a7b5fc_0_cpython + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.12.3-h4a7b5fc_0_cpython.conda + sha256: c761fb3713ea66bce3889b33b6f400afb2dd192d1fc2686446e9d8166cfcec6b + md5: 8643ab37bece6ae8f112464068d9df9c + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.6.2,<3.0a0 + - libffi >=3.4,<4.0a0 + - libsqlite >=3.45.2,<4.0a0 + - libzlib >=1.2.13,<1.3.0a0 + - ncurses >=6.4.20240210,<7.0a0 + - openssl >=3.2.1,<4.0a0 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - xz >=5.2.6,<6.0a0 + constrains: + - python_abi 3.12.* *_cp312 + license: Python-2.0 + size: 13207557 + timestamp: 1713206576646 +- kind: conda + name: python + version: 3.12.3 + build: hab00c5b_0_cpython + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.3-hab00c5b_0_cpython.conda + sha256: f9865bcbff69f15fd89a33a2da12ad616e98d65ce7c83c644b92e66e5016b227 + md5: 2540b74d304f71d3e89c81209db4db84 + depends: + - bzip2 >=1.0.8,<2.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.6.2,<3.0a0 + - libffi >=3.4,<4.0a0 + - libgcc-ng >=12 + - libnsl >=2.0.1,<2.1.0a0 + - libsqlite >=3.45.2,<4.0a0 + - libuuid >=2.38.1,<3.0a0 + - libxcrypt >=4.4.36 + - libzlib >=1.2.13,<1.3.0a0 + - ncurses >=6.4.20240210,<7.0a0 + - openssl >=3.2.1,<4.0a0 + - readline >=8.2,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - xz >=5.2.6,<6.0a0 + constrains: + - python_abi 3.12.* *_cp312 + license: Python-2.0 + size: 31991381 + timestamp: 1713208036041 - kind: conda name: python-dateutil version: 2.9.0 @@ -4127,6 +4391,66 @@ packages: license_family: BSD size: 6751 timestamp: 1695147671006 +- kind: conda + name: python_abi + version: '3.12' + build: 4_cp312 + build_number: 4 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-4_cp312.conda + sha256: 182a329de10a4165f6e8a3804caf751f918f6ea6176dd4e5abcdae1ed3095bf6 + md5: dccc2d142812964fcc6abdc97b672dff + constrains: + - python 3.12.* *_cpython + license: BSD-3-Clause + license_family: BSD + size: 6385 + timestamp: 1695147396604 +- kind: conda + name: python_abi + version: '3.12' + build: 4_cp312 + build_number: 4 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.12-4_cp312.conda + sha256: 82c154d95c1637604671a02a89e72f1382e89a4269265a03506496bd928f6f14 + md5: 87201ac4314b911b74197e588cca3639 + constrains: + - python 3.12.* *_cpython + 
license: BSD-3-Clause + license_family: BSD + size: 6496 + timestamp: 1695147498447 +- kind: conda + name: python_abi + version: '3.12' + build: 4_cp312 + build_number: 4 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/python_abi-3.12-4_cp312.conda + sha256: db25428e4f24f8693ffa39f3ff6dfbb8fd53bc298764b775b57edab1c697560f + md5: bbb3a02c78b2d8219d7213f76d644a2a + constrains: + - python 3.12.* *_cpython + license: BSD-3-Clause + license_family: BSD + size: 6508 + timestamp: 1695147497048 +- kind: conda + name: python_abi + version: '3.12' + build: 4_cp312 + build_number: 4 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.12-4_cp312.conda + sha256: 488f8519d04b48f59bd6fde21ebe2d7a527718ff28aac86a8b53aa63658bdef6 + md5: 17f4ccf6be9ded08bd0a376f489ac1a6 + constrains: + - python 3.12.* *_cpython + license: BSD-3-Clause + license_family: BSD + size: 6785 + timestamp: 1695147430513 - kind: conda name: pytz version: '2024.1' @@ -4145,76 +4469,76 @@ packages: - kind: conda name: pyyaml version: 6.0.1 - build: py38h01eb140_1 + build: py312h02f2b3b_1 build_number: 1 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py38h01eb140_1.conda - sha256: 7741529957e3b3428af73f003f043c9983ed672c69dc4aafef848b2583c4571b - md5: 5f05353ae9a6c37e1b4aebc9f7834d23 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.1-py312h02f2b3b_1.conda + sha256: b6b4027b89c17b9bbd8089aec3e44bc29f802a7d5668d5a75b5358d7ed9705ca + md5: a0c843e52a1c4422d8657dd76e9eb994 depends: - - libgcc-ng >=12 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python >=3.12.0rc3,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 - yaml >=0.2.5,<0.3.0a0 license: MIT license_family: MIT - size: 182153 - timestamp: 1695373618370 + size: 182705 + timestamp: 1695373895409 - kind: conda name: pyyaml version: 6.0.1 - build: py38h91455d4_1 + build: py312h104f124_1 build_number: 1 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.1-py38h91455d4_1.conda - sha256: 1cd8fe0f885c7e491b41e55611f546d011db8ac45941202eb2ef1549f6df0507 - md5: 4d9ea280b4f91fa5b0c0d34f2fce99cb + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.1-py312h104f124_1.conda + sha256: 04aa180782cb675b960c0bf4aad439b4a7a08553c6af74d0b8e5df9a0c7cc4f4 + md5: 260ed90aaf06061edabd7209638cf03b depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 - - ucrt >=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 - yaml >=0.2.5,<0.3.0a0 license: MIT license_family: MIT - size: 151945 - timestamp: 1695373981322 + size: 185636 + timestamp: 1695373742454 - kind: conda name: pyyaml version: 6.0.1 - build: py38hb192615_1 + build: py312h98912ed_1 build_number: 1 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.1-py38hb192615_1.conda - sha256: a4bcd94eda8611ade946a52cb52cf60ca6aa4d69915a9c68a9d9b7cbf02e4ac0 - md5: 72ee6bc5ee0182fb7c5f26461504cbf5 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py312h98912ed_1.conda + sha256: 7f347a10a7121b08d79d21cd4f438c07c23479ea0c74dfb89d6dc416f791bb7f + md5: e3fd78d8d490af1d84763b9fe3f2e552 depends: - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - libgcc-ng >=12 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 - yaml 
>=0.2.5,<0.3.0a0 license: MIT license_family: MIT - size: 158422 - timestamp: 1695373866893 + size: 196583 + timestamp: 1695373632212 - kind: conda name: pyyaml version: 6.0.1 - build: py38hcafd530_1 + build: py312he70551f_1 build_number: 1 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.1-py38hcafd530_1.conda - sha256: cd1dceaa9bb8296ddea04cfb5e933bf5ab2b189c566bb55e1a3c9a38efffa82d - md5: 17cfcfdd18fa2fe701ff68c9bbcea9a5 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.1-py312he70551f_1.conda + sha256: a72fa8152791b4738432f270e70b3a9a4d583ef059a78aa1c62f4b4ab7b15494 + md5: f91e0baa89ba21166916624ba7bfb422 depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12.0rc3,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 - yaml >=0.2.5,<0.3.0a0 license: MIT license_family: MIT - size: 161848 - timestamp: 1695373748011 + size: 167932 + timestamp: 1695374097139 - kind: conda name: pyyaml-env-tag version: '0.1' @@ -4279,69 +4603,71 @@ packages: timestamp: 1679532707590 - kind: conda name: regex - version: 2024.4.16 - build: py38h01eb140_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/regex-2024.4.16-py38h01eb140_0.conda - sha256: 6be2e931308e4245aefaab5a4658de2353da895f3f4a860f9c672ce021063f7d - md5: ab3c16328ee4d9702eb90c56c8228450 + version: 2024.4.28 + build: py312h4389bb4_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/regex-2024.4.28-py312h4389bb4_0.conda + sha256: b16227dbc411267d9b23ea9bfc9778bbae4a8593b1e84fac71fb1cdf829d1d61 + md5: fc45482d8e83a3c85acb107385550166 depends: - - libgcc-ng >=12 - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 + - ucrt >=10.0.20348.0 + - vc >=14.2,<15 + - vc14_runtime >=14.29.30139 license: Python-2.0 license_family: PSF - size: 346393 - timestamp: 1713305283145 + size: 358313 + timestamp: 1714348290002 - kind: conda name: regex - version: 2024.4.16 - build: py38h336bac9_0 + version: 2024.4.28 + build: py312h4a164c9_0 subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/regex-2024.4.16-py38h336bac9_0.conda - sha256: c7cd0f88c7ccfa53d85f47881f4d4b100c64301a3e216e1d854739a00f5b52d4 - md5: 4c1ff46c475f150365b38560c0126928 + url: https://conda.anaconda.org/conda-forge/osx-arm64/regex-2024.4.28-py312h4a164c9_0.conda + sha256: a9cbb9201f987a1449634f70877939795c17fa58c50ff466191194e7ea955af1 + md5: bb22ba0467f1fd98413d2c1a3df76231 depends: - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - __osx >=11.0 + - python >=3.12,<3.13.0a0 + - python >=3.12,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 license: Python-2.0 license_family: PSF - size: 307898 - timestamp: 1713305521381 + size: 361720 + timestamp: 1714348060826 - kind: conda name: regex - version: 2024.4.16 - build: py38h91455d4_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/regex-2024.4.16-py38h91455d4_0.conda - sha256: 8d23a0e3f5c6dd727f62b7f9c47b28df6b305a562bee0dce42053afbe7c0b39a - md5: ff05c0626ba6b41b284639d6f23b192a + version: 2024.4.28 + build: py312h5fa3f64_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/regex-2024.4.28-py312h5fa3f64_0.conda + sha256: 9b4386d84b0511ad48ea1d208f177a388d2b10c2c8062850ee35f123738ba78e + md5: 025920a03909118fc0f208c1dcc62b94 depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 - - ucrt 
>=10.0.20348.0 - - vc >=14.2,<15 - - vc14_runtime >=14.29.30139 + - __osx >=10.9 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 license: Python-2.0 license_family: PSF - size: 306076 - timestamp: 1713305834816 + size: 366832 + timestamp: 1714347981911 - kind: conda name: regex - version: 2024.4.16 - build: py38hae2e43d_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/regex-2024.4.16-py38hae2e43d_0.conda - sha256: f32549036335e1056746c1f501ffe9208de094be6efdf2656c09e3c24bd7517c - md5: 7a5e00f13c2862b7e0a53ad92e7dea9b + version: 2024.4.28 + build: py312h9a8786e_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/regex-2024.4.28-py312h9a8786e_0.conda + sha256: 3ee80b9a7bc73fe1a68feeb3eebedf19d6cc57f4181e5e7f75a772afb269f221 + md5: 39fbec9483427256beaec8b6104e52c0 depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - libgcc-ng >=12 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 license: Python-2.0 license_family: PSF - size: 312856 - timestamp: 1713305547739 + size: 399284 + timestamp: 1714347894590 - kind: conda name: requests version: 2.31.0 @@ -4435,6 +4761,133 @@ packages: license_family: MIT size: 6343187 timestamp: 1712963346969 +- kind: conda + name: rust + version: 1.77.2 + build: h4ff7c5d_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/rust-1.77.2-h4ff7c5d_0.conda + sha256: 048ffabbbbd1b5109d59ec15610cf0e489c39b4f6f380953816bcb26dad8da17 + md5: 4083c1a9d7f5c9591273f578530d6388 + depends: + - rust-std-aarch64-apple-darwin 1.77.2 hf6ec828_0 + license: MIT + license_family: MIT + size: 145759919 + timestamp: 1712743398771 +- kind: conda + name: rust + version: 1.77.2 + build: h70c747d_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/rust-1.77.2-h70c747d_0.conda + sha256: 3b8cf09335d23c52d6e7150e4cc6d999ed4e2b3dc2307652f20e1a4669ff0846 + md5: ba764892e80fe0380bb7fa99751b186d + depends: + - gcc_impl_linux-64 + - libgcc-ng >=12 + - libzlib >=1.2.13,<1.3.0a0 + - rust-std-x86_64-unknown-linux-gnu 1.77.2 h2c6d0dc_0 + license: MIT + license_family: MIT + size: 186765686 + timestamp: 1712741423714 +- kind: conda + name: rust + version: 1.77.2 + build: h7e1429e_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/rust-1.77.2-h7e1429e_0.conda + sha256: d12cde3691eb50148b49460ac2bff0c0716204099a38d36132762ffb0c6c79fd + md5: 13c8a97dd157999cdd23adaac7919047 + depends: + - rust-std-x86_64-apple-darwin 1.77.2 h38e4360_0 + license: MIT + license_family: MIT + size: 192493395 + timestamp: 1712743664947 +- kind: conda + name: rust + version: 1.77.2 + build: hf8d6059_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/rust-1.77.2-hf8d6059_0.conda + sha256: 978228c14a3d2af2d9d52230443f232d7a22cbbe98d359a306b1a761773d4589 + md5: ba05fee8761e5bd25ae642a4b77d2ed7 + depends: + - rust-std-x86_64-pc-windows-msvc 1.77.2 h17fc481_0 + license: MIT + license_family: MIT + size: 187565499 + timestamp: 1712743189902 +- kind: conda + name: rust-std-aarch64-apple-darwin + version: 1.77.2 + build: hf6ec828_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-aarch64-apple-darwin-1.77.2-hf6ec828_0.conda + sha256: 19b17ddca3896f12a640858b45a7ba5e8495ca07286b622535ca5a4bf8217906 + md5: 729f181cdeb249ff2da37f434b548633 + depends: + - __unix + constrains: + - rust >=1.77.2,<1.77.3.0a0 + license: MIT + license_family: MIT + size: 30933811 + timestamp: 1712740743456 +- kind: conda + name: 
rust-std-x86_64-apple-darwin + version: 1.77.2 + build: h38e4360_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-apple-darwin-1.77.2-h38e4360_0.conda + sha256: 1d0a99136ab0a2b05d9df4d5a7a8d665595c2e72ee1d19fcad0c6f1b402f37d1 + md5: 67db6d59468a8145fb076d75d156b69c + depends: + - __unix + constrains: + - rust >=1.77.2,<1.77.3.0a0 + license: MIT + license_family: MIT + size: 31857486 + timestamp: 1712740749097 +- kind: conda + name: rust-std-x86_64-pc-windows-msvc + version: 1.77.2 + build: h17fc481_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-pc-windows-msvc-1.77.2-h17fc481_0.conda + sha256: 0c290c52a3cf1ac43a316d6caf0e073614351ccae31c681d6953dec7a2ff21e3 + md5: 2149767f1c882154246a9a569991e3c3 + depends: + - __win + constrains: + - rust >=1.77.2,<1.77.3.0a0 + license: MIT + license_family: MIT + size: 25276039 + timestamp: 1712742986757 +- kind: conda + name: rust-std-x86_64-unknown-linux-gnu + version: 1.77.2 + build: h2c6d0dc_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/rust-std-x86_64-unknown-linux-gnu-1.77.2-h2c6d0dc_0.conda + sha256: 73f7537db6bc0471135a85a261798abe77e7e83794f945a0355c4068973f31f6 + md5: db8b81b3806faafe2f6f7bd431f72e37 + depends: + - __unix + constrains: + - rust >=1.77.2,<1.77.3.0a0 + license: MIT + license_family: MIT + size: 33827015 + timestamp: 1712741238767 - kind: conda name: setuptools version: 69.5.1 @@ -4465,6 +4918,22 @@ packages: license_family: MIT size: 14259 timestamp: 1620240338595 +- kind: conda + name: sysroot_linux-64 + version: '2.12' + build: he073ed8_17 + build_number: 17 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_17.conda + sha256: b4e4d685e41cb36cfb16f0cb15d2c61f8f94f56fab38987a44eff95d8a673fb5 + md5: 595db67e32b276298ff3d94d07d47fbf + depends: + - kernel-headers_linux-64 2.6.32 he073ed8_17 + license: LGPL-2.0-or-later AND LGPL-2.0-or-later WITH exceptions AND GPL-2.0-or-later AND MPL-2.0 + license_family: GPL + size: 15127123 + timestamp: 1708000843849 - kind: conda name: tinycss2 version: 1.3.0 @@ -4589,6 +5058,18 @@ packages: license_family: PSF size: 37583 timestamp: 1712330089194 +- kind: conda + name: tzdata + version: 2024a + build: h0c530f3_0 + subdir: noarch + noarch: generic + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda + sha256: 7b2b69c54ec62a243eb6fba2391b5e443421608c3ae5dbff938ad33ca8db5122 + md5: 161081fc7cec0bfda0d86d7cb595f8d8 + license: LicenseRef-Public-Domain + size: 119815 + timestamp: 1706886945727 - kind: conda name: ucrt version: 10.0.22621.0 @@ -4672,68 +5153,68 @@ packages: - kind: conda name: watchdog version: 4.0.0 - build: py38h2bea1e5_0 - subdir: osx-64 - url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.0-py38h2bea1e5_0.conda - sha256: e8505e3d0453cc290f11c96ba5d56330be2a021b6848164811f37fe49828d7d7 - md5: 0d0a92d705e8a42ea70651eb6f3e2e8c + build: py312h2e8e312_0 + subdir: win-64 + url: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.0-py312h2e8e312_0.conda + sha256: 4b1eeaecccadf55a5c322e25290d75c8bed7b0d5e25fa6dfa03fc16fc9919fc4 + md5: 186ec4486a2c5d738c002067665b50be depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - pyyaml >=3.10 license: Apache-2.0 license_family: APACHE - size: 118056 - timestamp: 1707295680540 + size: 152911 + timestamp: 
1707295573907 - kind: conda name: watchdog version: 4.0.0 - build: py38h336bac9_0 - subdir: osx-arm64 - url: https://conda.anaconda.org/conda-forge/osx-arm64/watchdog-4.0.0-py38h336bac9_0.conda - sha256: 8acd6290c7f09b5b791eb9eeb43508eaed5aa22b75f81aad01e7f8287f98a77d - md5: 6987169f47c17148af8b2073726810b8 + build: py312h7900ff3_0 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.0-py312h7900ff3_0.conda + sha256: db3ef9753934826c008216b198f04a6637150e1d91d72733148c0822e4a042a2 + md5: 1b87b82dd803565550e6358c0790f3d2 depends: - - python >=3.8,<3.9.0a0 - - python >=3.8,<3.9.0a0 *_cpython - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - pyyaml >=3.10 license: Apache-2.0 license_family: APACHE - size: 118848 - timestamp: 1707295730630 + size: 136845 + timestamp: 1707295261797 - kind: conda name: watchdog version: 4.0.0 - build: py38h578d9bd_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/watchdog-4.0.0-py38h578d9bd_0.conda - sha256: f6a07da2d18ed7366e2b07832de4626cdd7a82d2c42d64866c20decbc10996b0 - md5: fd6f9afe747e1ec3744158e83728fc0b + build: py312hc2c2f20_0 + subdir: osx-64 + url: https://conda.anaconda.org/conda-forge/osx-64/watchdog-4.0.0-py312hc2c2f20_0.conda + sha256: f333e1f11d60e096d8b0f2b7dbe313fc9ee22d6c09f0a0cc7d3c9fed56ee48dd + md5: ebd7ea0d23052393f0a62efe8a508e99 depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python_abi 3.12.* *_cp312 - pyyaml >=3.10 license: Apache-2.0 license_family: APACHE - size: 110414 - timestamp: 1707295340662 + size: 144711 + timestamp: 1707295580304 - kind: conda name: watchdog version: 4.0.0 - build: py38haa244fe_0 - subdir: win-64 - url: https://conda.anaconda.org/conda-forge/win-64/watchdog-4.0.0-py38haa244fe_0.conda - sha256: 53248a9bc2f0cead364d18fde7f3e169bdd8ea1284dd71e04684bd45601dcad2 - md5: 6c76373804c8f4b37dec9b23522d9624 + build: py312he37b823_0 + subdir: osx-arm64 + url: https://conda.anaconda.org/conda-forge/osx-arm64/watchdog-4.0.0-py312he37b823_0.conda + sha256: 3e7486e161e4478a1bb63cb124a446b21b0af113458522d215ba76eebb1a473a + md5: c483c04540c229b50564201c5432667c depends: - - python >=3.8,<3.9.0a0 - - python_abi 3.8.* *_cp38 + - python >=3.12,<3.13.0a0 + - python >=3.12,<3.13.0a0 *_cpython + - python_abi 3.12.* *_cp312 - pyyaml >=3.10 license: Apache-2.0 license_family: APACHE - size: 127122 - timestamp: 1707295617098 + size: 145347 + timestamp: 1707295575866 - kind: conda name: webencodings version: 0.5.1 diff --git a/py-rattler/pixi.toml b/py-rattler/pixi.toml index b6945b1a0..7b7cccece 100644 --- a/py-rattler/pixi.toml +++ b/py-rattler/pixi.toml @@ -16,6 +16,7 @@ license = "BSD-3-Clause" [feature.build.dependencies] maturin = "~=1.2.2" pip = "~=23.2.1" +rust = "~=1.77" [feature.build.tasks] build = "PIP_REQUIRE_VIRTUALENV=false maturin develop" @@ -54,7 +55,8 @@ fmt-check = { depends_on = ["fmt-python-check", "fmt-rust-check"] } mkdocs = "1.5.3.*" mkdocstrings-python = ">=1.9.0,<1.10" mkdocstrings = ">=0.24.1,<0.25" -mkdocs-material = ">=9.5.17" +mkdocs-material = ">=9.5.20" +mdx_truly_sane_lists = ">=1.3,<2" cairosvg = "2.7.1.*" pillow = ">=9.4.0" @@ -63,6 +65,6 @@ docs = { cmd = "mkdocs serve" } build-docs = { cmd = "mkdocs build" } [environments] -build = [ "build" ] -test = [ "test" ] -docs = [ "docs" ] +build = { features = ["build"], solve-group = "default" } +test = { features = ["build", "test"], solve-group = "default" } +docs = ["docs"] diff --git 
a/py-rattler/rattler/__init__.py b/py-rattler/rattler/__init__.py index d3bbe3cf9..2f317e34c 100644 --- a/py-rattler/rattler/__init__.py +++ b/py-rattler/rattler/__init__.py @@ -6,6 +6,8 @@ RepoDataRecord, PatchInstructions, SparseRepoData, + Gateway, + SourceConfig, ) from rattler.channel import Channel, ChannelConfig, ChannelPriority from rattler.networking import AuthenticatedClient, fetch_repo_data @@ -19,9 +21,9 @@ PathType, PrefixPlaceholder, FileMode, + IndexJson, ) from rattler.prefix import PrefixRecord, PrefixPaths, PrefixPathsEntry, PrefixPathType -from rattler.solver import solve from rattler.platform import Platform from rattler.utils.rattler_version import get_rattler_version as _get_rattler_version from rattler.linker import link @@ -35,6 +37,7 @@ PypiPackageData, PypiPackageEnvironmentData, ) +from rattler.solver import solve __version__ = _get_rattler_version() del _get_rattler_version @@ -80,4 +83,6 @@ "PrefixPlaceholder", "FileMode", "IndexJson", + "Gateway", + "SourceConfig", ] diff --git a/py-rattler/rattler/exceptions.py b/py-rattler/rattler/exceptions.py index dd9b1599a..954789ca4 100644 --- a/py-rattler/rattler/exceptions.py +++ b/py-rattler/rattler/exceptions.py @@ -19,6 +19,7 @@ VersionBumpError, EnvironmentCreationError, ExtractError, + GatewayError, ) except ImportError: # They are only redefined for documentation purposes @@ -81,6 +82,9 @@ class EnvironmentCreationError(Exception): # type: ignore[no-redef] class ExtractError(Exception): # type: ignore[no-redef] """An error that can occur when extracting an archive.""" + class GatewayError(Exception): # type: ignore[no-redef] + """An error that can occur when querying the repodata gateway.""" + __all__ = [ "ActivationError", @@ -102,4 +106,5 @@ class ExtractError(Exception): # type: ignore[no-redef] "VersionBumpError", "EnvironmentCreationError", "ExtractError", + "GatewayError", ] diff --git a/py-rattler/rattler/platform/__init__.py b/py-rattler/rattler/platform/__init__.py index 06cf2e856..4a56a4f6e 100644 --- a/py-rattler/rattler/platform/__init__.py +++ b/py-rattler/rattler/platform/__init__.py @@ -1,4 +1,4 @@ -from rattler.platform.platform import Platform +from rattler.platform.platform import Platform, PlatformLiteral from rattler.platform.arch import Arch -__all__ = ["Platform", "Arch"] +__all__ = ["Platform", "PlatformLiteral", "Arch"] diff --git a/py-rattler/rattler/repo_data/__init__.py b/py-rattler/rattler/repo_data/__init__.py index 9512b7097..2cf581994 100644 --- a/py-rattler/rattler/repo_data/__init__.py +++ b/py-rattler/rattler/repo_data/__init__.py @@ -3,6 +3,7 @@ from rattler.repo_data.patch_instructions import PatchInstructions from rattler.repo_data.record import RepoDataRecord from rattler.repo_data.sparse import SparseRepoData +from rattler.repo_data.gateway import Gateway, SourceConfig __all__ = [ "PackageRecord", @@ -10,4 +11,6 @@ "PatchInstructions", "RepoDataRecord", "SparseRepoData", + "Gateway", + "SourceConfig", ] diff --git a/py-rattler/rattler/repo_data/gateway.py b/py-rattler/rattler/repo_data/gateway.py new file mode 100644 index 000000000..b1b01f9aa --- /dev/null +++ b/py-rattler/rattler/repo_data/gateway.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +import os +from typing import Optional, List, Literal +from dataclasses import dataclass + +from rattler.rattler import PyGateway, PySourceConfig, PyMatchSpec + +from rattler.channel import Channel +from rattler.match_spec import MatchSpec +from rattler.repo_data.record import RepoDataRecord +from rattler.platform 
import Platform, PlatformLiteral +from rattler.package.package_name import PackageName + +CacheAction = Literal["cache-or-fetch", "use-cache-only", "force-cache-only", "no-cache"] + + +@dataclass +class SourceConfig: + """ + Describes properties about a channel. + + This can be used to configure the Gateway to handle channels in a certain + way. + """ + + jlap_enabled: bool = True + """Whether fetching JLAP (incremental repodata patches) is enabled or not.""" + + zstd_enabled: bool = True + """Whether the ZSTD compression is enabled or not.""" + + bz2_enabled: bool = True + """Whether the BZ2 compression is enabled or not.""" + + cache_action: CacheAction = "cache-or-fetch" + """How to interact with the cache. + + * `'cache-or-fetch'` (default): Use the cache if it's up to date, or fetch from the URL if there is no valid cached value. + * `'use-cache-only'`: Only use the cache, but error out if the cache is not up to date. + * `'force-cache-only'`: Only use the cache, ignoring whether or not it is up to date. + * `'no-cache'`: Do not use the cache even if there is an up-to-date entry. + """ + + def _into_py(self) -> PySourceConfig: + """ + Converts this object into a type that can be used by the Rust code. + + Examples + -------- + ```python + >>> SourceConfig()._into_py() # doctest: +ELLIPSIS + <builtins.PySourceConfig object at 0x...> + >>> + ``` + """ + return PySourceConfig( + jlap_enabled=self.jlap_enabled, + zstd_enabled=self.zstd_enabled, + bz2_enabled=self.bz2_enabled, + cache_action=self.cache_action, + ) + + +class Gateway: + """ + The gateway manages all the quirks and complex bits of efficiently acquiring + repodata. It implements all the necessary logic to fetch the repodata from a + remote server, cache it locally and convert it into python objects. + + The gateway can also easily be used concurrently, as it is designed to be + thread-safe. When two threads are querying the same channel at the same time, + their requests are coalesced into a single request. This is done to reduce the + number of requests made to the remote server and reduce the overall memory usage. + + The gateway caches the repodata internally, so if the same channel is queried + multiple times the records will only be fetched once. However, the conversion + of the records to a python object is done every time the query method is called. + Therefore, instead of requesting records directly, it's more efficient to pass the + gateway itself to methods that accept it. + """ + + def __init__( + self, + cache_dir: Optional[os.PathLike[str]] = None, + default_config: Optional[SourceConfig] = None, + per_channel_config: Optional[dict[Channel | str, SourceConfig]] = None, + max_concurrent_requests: int = 100, + ) -> None: + """ + Arguments: + cache_dir: The directory where the repodata should be cached. If not specified, the + default cache directory is used. + default_config: The default configuration for channels. + per_channel_config: Per channel configuration. + max_concurrent_requests: The maximum number of concurrent requests that can be made.
+ + Examples + -------- + ```python + >>> Gateway() + Gateway() + >>> + ``` + """ + default_config = default_config or SourceConfig() + + self._gateway = PyGateway( + cache_dir=cache_dir, + default_config=default_config._into_py(), + per_channel_config={ + channel._channel if isinstance(channel, Channel) else Channel(channel)._channel: config._into_py() + for channel, config in (per_channel_config or {}).items() + }, + max_concurrent_requests=max_concurrent_requests, + ) + + async def query( + self, + channels: List[Channel | str], + platforms: List[Platform | PlatformLiteral], + specs: List[MatchSpec | PackageName | str], + recursive: bool = True, + ) -> List[List[RepoDataRecord]]: + """Queries the gateway for repodata. + + If `recursive` is `True`, the gateway will recursively fetch the dependencies of the + encountered records. If `recursive` is `False`, only the records with the package names + specified in `specs` are returned. + + The `specs` can either be a `MatchSpec`, `PackageName` or a string. If a string or a + `PackageName` is provided, it will be converted into a MatchSpec that matches any record + with the given name. If a `MatchSpec` is provided, all records that match the name + specified in the spec will be returned, but only the dependencies of the records + that match the entire spec are recursively fetched. + + The gateway caches the records internally, so if the same channel is queried multiple + times the records will only be fetched once. However, the conversion of the records to + a python object is done every time the query method is called. + + Arguments: + channels: The channels to query. + platforms: The platforms to query. + specs: The specs to query. + recursive: Whether to recursively fetch dependencies or not. + + Returns: + A list of lists of `RepoDataRecord`s. The outer list contains the results for each + channel in the same order they are provided in the `channels` argument. + + Examples + -------- + ```python + >>> import asyncio + >>> gateway = Gateway() + >>> records = asyncio.run(gateway.query(["conda-forge"], ["linux-aarch64"], ["python"])) + >>> assert len(records) == 1 + >>> + ``` + """ + py_records = await self._gateway.query( + channels=[ + channel._channel if isinstance(channel, Channel) else Channel(channel)._channel for channel in channels + ], + platforms=[ + platform._inner if isinstance(platform, Platform) else Platform(platform)._inner + for platform in platforms + ], + specs=[spec._match_spec if isinstance(spec, MatchSpec) else PyMatchSpec(str(spec), True) for spec in specs], + recursive=recursive, + ) + + # Convert the records into python objects + return [[RepoDataRecord._from_py_record(record) for record in records] for records in py_records] + + def __repr__(self) -> str: + """ + Returns a representation of the Gateway.
+ + Examples + -------- + ```python + >>> Gateway() + Gateway() + >>> + ``` + """ + return f"{type(self).__name__}()" diff --git a/py-rattler/rattler/solver/solver.py b/py-rattler/rattler/solver/solver.py index bf25b5c69..21629ea7b 100644 --- a/py-rattler/rattler/solver/solver.py +++ b/py-rattler/rattler/solver/solver.py @@ -1,18 +1,24 @@ from __future__ import annotations import datetime from typing import List, Optional + +from rattler import Channel, Platform from rattler.match_spec.match_spec import MatchSpec from rattler.channel import ChannelPriority -from rattler.rattler import py_solve +from rattler.rattler import py_solve, PyMatchSpec + +from rattler.platform.platform import PlatformLiteral +from rattler.repo_data.gateway import Gateway from rattler.repo_data.record import RepoDataRecord -from rattler.repo_data.sparse import SparseRepoData from rattler.virtual_package.generic import GenericVirtualPackage -def solve( - specs: List[MatchSpec], - available_packages: List[SparseRepoData], +async def solve( + channels: List[Channel | str], + platforms: List[Platform | PlatformLiteral], + specs: List[MatchSpec | str], + gateway: Gateway, locked_packages: Optional[List[RepoDataRecord]] = None, pinned_packages: Optional[List[RepoDataRecord]] = None, virtual_packages: Optional[List[GenericVirtualPackage]] = None, @@ -25,7 +31,9 @@ def solve( Arguments: specs: A list of matchspec to solve. - available_packages: A list of RepoData to use for solving the `specs`. + channels: The channels to query for the packages. + platforms: The platforms to query for the packages. + gateway: The gateway to use for acquiring repodata. locked_packages: Records of packages that are previously selected. If the solver encounters multiple variants of a single package (identified by its name), it will sort the records @@ -46,6 +54,7 @@ def solve( the channel that the package is first found in will be used as the only channel for that package. When `ChannelPriority.Disabled` it will search for every package in every channel. + timeout: The maximum time the solver is allowed to run. Returns: Resolved list of `RepoDataRecord`s. 
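The reworked `solve` above is now a coroutine that takes channels, platforms, and a `Gateway` instead of pre-loaded `SparseRepoData`. As a rough sketch of how the new entry point is meant to be called (the channel name and specs here are illustrative, and network access to conda-forge is assumed):

```python
import asyncio

from rattler import Gateway, solve


async def main() -> None:
    # A single Gateway can be shared across queries; it caches repodata internally.
    gateway = Gateway()

    # Channels, platforms and specs may be passed as plain strings; solve()
    # coerces them to Channel/Platform/MatchSpec as shown in the diff above.
    records = await solve(
        channels=["conda-forge"],
        platforms=["linux-64", "noarch"],
        specs=["python", "sqlite"],
        gateway=gateway,
    )
    for record in records:
        print(record.file_name)


asyncio.run(main())
```

Since `py_solve` now returns a future, callers must await `solve`; the updated tests later in this diff follow the same pattern via `pytest.mark.asyncio`.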
@@ -53,13 +62,20 @@ return [ RepoDataRecord._from_py_record(solved_package) - for solved_package in py_solve( - [spec._match_spec for spec in specs], - [package._sparse for package in available_packages], - [package._record for package in locked_packages or []], - [package._record for package in pinned_packages or []], - [v_package._generic_virtual_package for v_package in virtual_packages or []], - channel_priority.value, - timeout.microseconds if timeout else None, + for solved_package in await py_solve( + channels=[ + channel._channel if isinstance(channel, Channel) else Channel(channel)._channel for channel in channels + ], + platforms=[ + platform._inner if isinstance(platform, Platform) else Platform(platform)._inner + for platform in platforms + ], + specs=[spec._match_spec if isinstance(spec, MatchSpec) else PyMatchSpec(str(spec), True) for spec in specs], + gateway=gateway._gateway, + locked_packages=[package._record for package in locked_packages or []], + pinned_packages=[package._record for package in pinned_packages or []], + virtual_packages=[v_package._generic_virtual_package for v_package in virtual_packages or []], + channel_priority=channel_priority.value, + timeout=timeout.microseconds if timeout else None, ) ] diff --git a/py-rattler/src/channel/mod.rs b/py-rattler/src/channel/mod.rs index 27bf578a3..2b9c012ea 100644 --- a/py-rattler/src/channel/mod.rs +++ b/py-rattler/src/channel/mod.rs @@ -39,7 +39,7 @@ impl PyChannelConfig { #[pyclass] #[repr(transparent)] -#[derive(Clone)] +#[derive(Clone, Hash, Eq, PartialEq)] pub struct PyChannel { pub(crate) inner: Channel, } @@ -79,7 +79,7 @@ impl PyChannel { /// Returns the Urls for the given platform. pub fn platform_url(&self, platform: &PyPlatform) -> String { - self.inner.platform_url(platform.clone().into()).into() + self.inner.platform_url((*platform).into()).into() } } diff --git a/py-rattler/src/error.rs b/py-rattler/src/error.rs index e57d08659..2d048c0c9 100644 --- a/py-rattler/src/error.rs +++ b/py-rattler/src/error.rs @@ -10,6 +10,7 @@ use rattler_conda_types::{ use rattler_lock::{ConversionError, ParseCondaLockError}; use rattler_package_streaming::ExtractError; use rattler_repodata_gateway::fetch::FetchRepoDataError; +use rattler_repodata_gateway::GatewayError; use rattler_shell::activation::ActivationError; use rattler_solve::SolveError; use rattler_virtual_packages::DetectVirtualPackageError; @@ -64,6 +65,8 @@ pub enum PyRattlerError { ExtractError(#[from] ExtractError), #[error(transparent)] ActivationScriptFormatError(std::fmt::Error), + #[error(transparent)] + GatewayError(#[from] GatewayError), } impl From<PyRattlerError> for PyErr { @@ -114,6 +117,7 @@ impl From<PyRattlerError> for PyErr { PyRattlerError::ActivationScriptFormatError(err) => { ActivationScriptFormatException::new_err(err.to_string()) } + PyRattlerError::GatewayError(err) => GatewayException::new_err(err.to_string()), } } } @@ -141,3 +145,4 @@ create_exception!(exceptions, RequirementException, PyException); create_exception!(exceptions, EnvironmentCreationException, PyException); create_exception!(exceptions, ExtractException, PyException); create_exception!(exceptions, ActivationScriptFormatException, PyException); +create_exception!(exceptions, GatewayException, PyException); diff --git a/py-rattler/src/lib.rs b/py-rattler/src/lib.rs index 629a8c05a..583fda306 100644 --- a/py-rattler/src/lib.rs +++ b/py-rattler/src/lib.rs @@ -23,6 +23,7 @@ mod virtual_package; mod index_json; mod run_exports_json; + use about_json::PyAboutJson; use channel::{PyChannel,
PyChannelConfig, PyChannelPriority}; use error::{ @@ -46,12 +47,19 @@ use no_arch_type::PyNoArchType; use package_name::PyPackageName; use paths_json::{PyFileMode, PyPathType, PyPathsEntry, PyPathsJson, PyPrefixPlaceholder}; use prefix_paths::{PyPrefixPathType, PyPrefixPaths, PyPrefixPathsEntry}; -use repo_data::{patch_instructions::PyPatchInstructions, sparse::PySparseRepoData, PyRepoData}; +use repo_data::{ + gateway::{PyGateway, PySourceConfig}, + patch_instructions::PyPatchInstructions, + sparse::PySparseRepoData, + PyRepoData, +}; use run_exports_json::PyRunExportsJson; +use std::ops::Deref; use version::PyVersion; use pyo3::prelude::*; +use crate::error::GatewayException; use index::py_index; use linker::py_link; use meta::get_rattler_version; @@ -61,6 +69,18 @@ use shell::{PyActivationResult, PyActivationVariables, PyActivator, PyShellEnum} use solver::py_solve; use virtual_package::PyVirtualPackage; +/// A struct to make it easy to wrap a type as a python type. +#[repr(transparent)] +#[derive(Clone)] +pub struct Wrap<T>(pub T); + +impl<T> Deref for Wrap<T> { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + #[pymodule] fn rattler(py: Python<'_>, m: &PyModule) -> PyResult<()> { m.add_class::().unwrap(); @@ -87,6 +107,8 @@ fn rattler(py: Python<'_>, m: &PyModule) -> PyResult<()> { m.add_class::().unwrap(); m.add_class::().unwrap(); m.add_class::().unwrap(); + m.add_class::<PyGateway>().unwrap(); + m.add_class::<PySourceConfig>().unwrap(); m.add_class::().unwrap(); @@ -194,5 +216,8 @@ fn rattler(py: Python<'_>, m: &PyModule) -> PyResult<()> { m.add("ExtractError", py.get_type::<ExtractException>()) .unwrap(); + m.add("GatewayError", py.get_type::<GatewayException>()) + .unwrap(); + Ok(()) } diff --git a/py-rattler/src/linker.rs b/py-rattler/src/linker.rs index fc5e90a39..67ca338b6 100644 --- a/py-rattler/src/linker.rs +++ b/py-rattler/src/linker.rs @@ -199,7 +199,7 @@ pub async fn install_package_to_environment( }; let target_prefix = target_prefix.clone(); - match tokio::task::spawn_blocking(move || { + let write_prefix_fut = tokio::task::spawn_blocking(move || { let conda_meta_path = target_prefix.join("conda-meta"); std::fs::create_dir_all(&conda_meta_path)?; @@ -215,8 +215,8 @@ pub async fn install_package_to_environment( )); prefix_record.write_to_path(pkg_meta_path, true) }) - .await - { + .await; + match write_prefix_fut { Ok(result) => Ok(result?), Err(err) => { if let Ok(panic) = err.try_into_panic() { diff --git a/py-rattler/src/networking/mod.rs b/py-rattler/src/networking/mod.rs index 703444331..87b5ea1e7 100644 --- a/py-rattler/src/networking/mod.rs +++ b/py-rattler/src/networking/mod.rs @@ -3,17 +3,18 @@ use pyo3::{pyfunction, types::PyTuple, Py, PyAny, PyResult, Python, ToPyObject}; use pyo3_asyncio::tokio::future_into_py; use rattler_repodata_gateway::fetch::{ - fetch_repo_data, CachedRepoData, DownloadProgress, FetchRepoDataError, FetchRepoDataOptions, + fetch_repo_data, CachedRepoData, FetchRepoDataError, FetchRepoDataOptions, }; use url::Url; -use std::{path::PathBuf, str::FromStr}; +use std::{path::PathBuf, str::FromStr, sync::Arc}; use crate::{ channel::PyChannel, error::PyRattlerError, platform::PyPlatform, repo_data::sparse::PySparseRepoData, }; use authenticated_client::PyAuthenticatedClient; +use rattler_repodata_gateway::Reporter; pub mod authenticated_client; pub mod cached_repo_data; @@ -32,12 +33,11 @@ pub fn py_fetch_repo_data<'a>( let client = PyAuthenticatedClient::new(); for (subdir, chan) in get_subdir_urls(channels, platforms)?
{ - let progress = if let Some(callback) = callback { - let callback = callback.to_object(py); - Some(get_progress_func(callback)) - } else { - None - }; + let callback = callback.map(|callback| { + Arc::new(ProgressReporter { + callback: callback.to_object(py), + }) as _ + }); let cache_path = cache_path.clone(); let client = client.clone(); @@ -49,7 +49,7 @@ client.into(), cache_path, FetchRepoDataOptions::default(), - progress, + callback, ) .await?, chan, @@ -72,14 +72,23 @@ }) } -/// Creates a closure to show progress of Download -fn get_progress_func(callback: Py<PyAny>) -> Box<dyn FnMut(DownloadProgress) + Send + 'static> { - Box::new(move |progress: DownloadProgress| { +struct ProgressReporter { + callback: Py<PyAny>, +} + +impl Reporter for ProgressReporter { + fn on_download_progress( + &self, + _url: &Url, + _index: usize, + bytes_downloaded: usize, + total_bytes: Option<usize>, + ) { Python::with_gil(|py| { - let args = PyTuple::new(py, [Some(progress.bytes), progress.total]); - callback.call1(py, args).expect("Callback failed!"); + let args = PyTuple::new(py, [Some(bytes_downloaded), total_bytes]); + self.callback.call1(py, args).expect("Callback failed!"); }); - }) + } } /// Creates subdir URLs out of channels and platforms. diff --git a/py-rattler/src/platform.rs b/py-rattler/src/platform.rs index f1b024b9c..06527d7ae 100644 --- a/py-rattler/src/platform.rs +++ b/py-rattler/src/platform.rs @@ -53,7 +53,8 @@ impl PyArch { /////////////////////////// #[pyclass] -#[derive(Clone, Eq, PartialEq, Hash)] +#[repr(transparent)] +#[derive(Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)] pub struct PyPlatform { pub inner: Platform, } @@ -70,18 +71,6 @@ impl From<PyPlatform> for Platform { } } -impl PartialOrd for PyPlatform { - fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - Some(self.cmp(other)) - } -} - -impl Ord for PyPlatform { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.inner.cmp(&other.inner) - } -} - impl FromStr for PyPlatform { type Err = PyRattlerError; diff --git a/py-rattler/src/repo_data/gateway.rs b/py-rattler/src/repo_data/gateway.rs new file mode 100644 index 000000000..9cf8fbf1e --- /dev/null +++ b/py-rattler/src/repo_data/gateway.rs @@ -0,0 +1,148 @@ +use crate::error::PyRattlerError; +use crate::match_spec::PyMatchSpec; +use crate::platform::PyPlatform; +use crate::record::PyRecord; +use crate::{PyChannel, Wrap}; +use pyo3::exceptions::PyValueError; +use pyo3::{pyclass, pymethods, FromPyObject, PyAny, PyResult, Python}; +use pyo3_asyncio::tokio::future_into_py; +use rattler_repodata_gateway::fetch::CacheAction; +use rattler_repodata_gateway::{ChannelConfig, Gateway, SourceConfig}; +use std::collections::HashMap; +use std::path::PathBuf; + +#[pyclass] +#[repr(transparent)] +#[derive(Clone)] +pub struct PyGateway { + pub(crate) inner: Gateway, +} + +impl From<PyGateway> for Gateway { + fn from(value: PyGateway) -> Self { + value.inner + } +} + +impl From<Gateway> for PyGateway { + fn from(value: Gateway) -> Self { + Self { inner: value } + } +} + +#[pymethods] +impl PyGateway { + #[new] + pub fn new( + max_concurrent_requests: usize, + default_config: PySourceConfig, + per_channel_config: HashMap<PyChannel, PySourceConfig>, + cache_dir: Option<PathBuf>, + ) -> PyResult<Self> { + let channel_config = ChannelConfig { + default: default_config.into(), + per_channel: per_channel_config + .into_iter() + .map(|(k, v)| (k.into(), v.into())) + .collect(), + }; + + let mut gateway = Gateway::builder() + .with_max_concurrent_requests(max_concurrent_requests) + .with_channel_config(channel_config); + + if let Some(cache_dir) =
cache_dir { + gateway.set_cache_dir(cache_dir); + } + + Ok(Self { + inner: gateway.finish(), + }) + } + + pub fn query<'a>( + &self, + py: Python<'a>, + channels: Vec<PyChannel>, + platforms: Vec<PyPlatform>, + specs: Vec<PyMatchSpec>, + recursive: bool, + ) -> PyResult<&'a PyAny> { + let gateway = self.inner.clone(); + future_into_py(py, async move { + let repodatas = gateway + .query(channels, platforms.into_iter().map(|p| p.inner), specs) + .recursive(recursive) + .execute() + .await + .map_err(PyRattlerError::from)?; + + // Convert the records into a list of lists + Ok(repodatas + .into_iter() + .map(|r| { + r.into_iter() + .cloned() + .map(PyRecord::from) + .collect::<Vec<_>>() + }) + .collect::<Vec<_>>()) + }) + } +} + +#[pyclass] +#[repr(transparent)] +#[derive(Clone)] +pub struct PySourceConfig { + pub(crate) inner: SourceConfig, +} + +impl From<PySourceConfig> for SourceConfig { + fn from(value: PySourceConfig) -> Self { + value.inner + } +} + +impl From<SourceConfig> for PySourceConfig { + fn from(value: SourceConfig) -> Self { + Self { inner: value } + } +} + +impl FromPyObject<'_> for Wrap<CacheAction> { + fn extract(ob: &'_ PyAny) -> PyResult<Self> { + let parsed = match &*ob.extract::<String>()? { + "cache-or-fetch" => CacheAction::CacheOrFetch, + "use-cache-only" => CacheAction::UseCacheOnly, + "force-cache-only" => CacheAction::ForceCacheOnly, + "no-cache" => CacheAction::NoCache, + v => { + return Err(PyValueError::new_err(format!( + "cache action must be one of {{'cache-or-fetch', 'use-cache-only', 'force-cache-only', 'no-cache'}}, got {v}", + ))) + }, + }; + Ok(Wrap(parsed)) + } +} + +#[pymethods] +impl PySourceConfig { + #[new] + pub fn new( + jlap_enabled: bool, + zstd_enabled: bool, + bz2_enabled: bool, + cache_action: Wrap<CacheAction>, + ) -> Self { + Self { + inner: SourceConfig { + jlap_enabled, + zstd_enabled, + bz2_enabled, + cache_action: cache_action.0, + }, + } + } +} diff --git a/py-rattler/src/repo_data/mod.rs b/py-rattler/src/repo_data/mod.rs index 7d567bcc7..56e64821b 100644 --- a/py-rattler/src/repo_data/mod.rs +++ b/py-rattler/src/repo_data/mod.rs @@ -7,6 +7,7 @@ use crate::{channel::PyChannel, error::PyRattlerError, record::PyRecord}; use patch_instructions::PyPatchInstructions; +pub mod gateway; pub mod patch_instructions; pub mod sparse; diff --git a/py-rattler/src/solver.rs b/py-rattler/src/solver.rs index d21603923..fa2aae5a7 100644 --- a/py-rattler/src/solver.rs +++ b/py-rattler/src/solver.rs @@ -1,55 +1,79 @@ -use pyo3::{pyfunction, PyResult, Python}; -use rattler_repodata_gateway::sparse::SparseRepoData; -use rattler_solve::{resolvo::Solver, SolverImpl, SolverTask}; +use pyo3::{pyfunction, PyAny, PyErr, PyResult, Python}; +use pyo3_asyncio::tokio::future_into_py; +use rattler_solve::{resolvo::Solver, RepoDataIter, SolverImpl, SolverTask}; +use tokio::task::JoinError; +use crate::channel::PyChannel; +use crate::platform::PyPlatform; +use crate::repo_data::gateway::PyGateway; use crate::{ channel::PyChannelPriority, error::PyRattlerError, generic_virtual_package::PyGenericVirtualPackage, match_spec::PyMatchSpec, record::PyRecord, - repo_data::sparse::PySparseRepoData, }; #[allow(clippy::too_many_arguments)] #[pyfunction] pub fn py_solve( py: Python<'_>, + channels: Vec<PyChannel>, + platforms: Vec<PyPlatform>, specs: Vec<PyMatchSpec>, - available_packages: Vec<PySparseRepoData>, + gateway: PyGateway, locked_packages: Vec<PyRecord>, pinned_packages: Vec<PyRecord>, virtual_packages: Vec<PyGenericVirtualPackage>, channel_priority: PyChannelPriority, timeout: Option<u64>, -) -> PyResult<Vec<PyRecord>> { - py.allow_threads(move || { - let package_names = specs - .iter() - .filter_map(|match_spec| match_spec.inner.name.clone()); +) -> PyResult<&'_ PyAny> { + future_into_py(py, async
move { + let available_packages = gateway + .inner + .query( + channels.into_iter(), + platforms.into_iter().map(Into::into), + specs.clone().into_iter(), + ) + .recursive(true) + .execute() + .await + .map_err(PyRattlerError::from)?; - let available_packages = SparseRepoData::load_records_recursive( - available_packages.iter().map(Into::into), - package_names, - None, - )?; + let solve_result = tokio::task::spawn_blocking(move || { + let task = SolverTask { + available_packages: available_packages + .iter() + .map(RepoDataIter) + .collect::<Vec<_>>(), + locked_packages: locked_packages + .into_iter() + .map(TryInto::try_into) + .collect::<PyResult<Vec<_>>>()?, + pinned_packages: pinned_packages + .into_iter() + .map(TryInto::try_into) + .collect::<PyResult<Vec<_>>>()?, + virtual_packages: virtual_packages.into_iter().map(Into::into).collect(), + specs: specs.into_iter().map(Into::into).collect(), + timeout: timeout.map(std::time::Duration::from_micros), + channel_priority: channel_priority.into(), + }; - let task = SolverTask { - available_packages: &available_packages, - locked_packages: locked_packages - .into_iter() - .map(TryInto::try_into) - .collect::<PyResult<Vec<_>>>()?, - pinned_packages: pinned_packages - .into_iter() - .map(TryInto::try_into) - .collect::<PyResult<Vec<_>>>()?, - virtual_packages: virtual_packages.into_iter().map(Into::into).collect(), - specs: specs.into_iter().map(Into::into).collect(), - timeout: timeout.map(std::time::Duration::from_micros), - channel_priority: channel_priority.into(), - }; + Ok::<_, PyErr>( + Solver + .solve(task) + .map(|res| res.into_iter().map(Into::into).collect::<Vec<_>>()) + .map_err(PyRattlerError::from)?, + ) + }) + .await; - Ok(Solver - .solve(task) - .map(|res| res.into_iter().map(Into::into).collect::<Vec<_>>()) - .map_err(PyRattlerError::from)?) + match solve_result.map_err(JoinError::try_into_panic) { + Ok(solve_result) => Ok(solve_result?), + Err(Ok(payload)) => std::panic::resume_unwind(payload), + Err(Err(_err)) => Err(PyRattlerError::IoError(std::io::Error::new( + std::io::ErrorKind::Interrupted, + "solver task was cancelled", + )))?, + } }) } diff --git a/py-rattler/tests/conftest.py b/py-rattler/tests/conftest.py new file mode 100644 index 000000000..9c140ba84 --- /dev/null +++ b/py-rattler/tests/conftest.py @@ -0,0 +1,22 @@ +import os + +import pytest + +from rattler import Gateway, Channel + + +@pytest.fixture(scope="session") +def gateway() -> Gateway: + return Gateway() + +@pytest.fixture +def test_data_dir() -> str: + return os.path.normpath(os.path.join(os.path.dirname(__file__), "../../test-data")) + +@pytest.fixture +def conda_forge_channel(test_data_dir: str) -> Channel: + return Channel(os.path.join(test_data_dir, "channels/conda-forge")) + +@pytest.fixture +def pytorch_channel(test_data_dir: str) -> Channel: + return Channel(os.path.join(test_data_dir, "channels/pytorch")) diff --git a/py-rattler/tests/unit/test_link.py b/py-rattler/tests/unit/test_link.py index e235bfc72..b633209ce 100644 --- a/py-rattler/tests/unit/test_link.py +++ b/py-rattler/tests/unit/test_link.py @@ -1,31 +1,25 @@ -# type: ignore import os +from pathlib import Path + import pytest -from rattler import Channel, SparseRepoData, MatchSpec, solve, link +from rattler import solve, link, Gateway, Channel @pytest.mark.asyncio -async def test_link(tmp_path): +async def test_link(gateway: Gateway, conda_forge_channel: Channel, tmp_path: Path) -> None: cache_dir = tmp_path / "cache" env_dir = tmp_path / "env" - linux64_chan = Channel("conda-forge") - data_dir = os.path.join(os.path.dirname(__file__), "../../../test-data/") -
linux64_path = os.path.join(data_dir, "channels/conda-forge/linux-64/repodata.json") - linux64_data = SparseRepoData( - channel=linux64_chan, - subdir="linux-64", - path=linux64_path, - ) - - solved_data = solve( - [MatchSpec("xtensor")], - [linux64_data], + solved_data = await solve( + [conda_forge_channel], + ["noarch"], + ["conda-forge-pinning"], + gateway, ) await link(solved_data, env_dir, cache_dir) - assert os.path.exists(env_dir / "include/xtensor.hpp") - assert os.path.exists(env_dir / "include/xtensor") - assert os.path.exists(env_dir / "include/xtl") + assert os.path.exists(env_dir / "conda_build_config.yaml") + assert os.path.exists(env_dir / "share/conda-forge/migrations/pypy37.yaml") + assert os.path.exists(env_dir / "share/conda-forge/migrations/pypy37-windows.yaml") diff --git a/py-rattler/tests/unit/test_solver.py b/py-rattler/tests/unit/test_solver.py index f20b13e73..aec3c859b 100644 --- a/py-rattler/tests/unit/test_solver.py +++ b/py-rattler/tests/unit/test_solver.py @@ -1,29 +1,18 @@ -# type: ignore -import os.path +import pytest from rattler import ( solve, - Channel, ChannelPriority, - MatchSpec, - RepoDataRecord, - SparseRepoData, + RepoDataRecord, Channel, Gateway, ) - -def test_solve(): - linux64_chan = Channel("conda-forge") - data_dir = os.path.join(os.path.dirname(__file__), "../../../test-data/") - linux64_path = os.path.join(data_dir, "channels/conda-forge/linux-64/repodata.json") - linux64_data = SparseRepoData( - channel=linux64_chan, - subdir="linux-64", - path=linux64_path, - ) - - solved_data = solve( - [MatchSpec("python"), MatchSpec("sqlite")], - [linux64_data], +@pytest.mark.asyncio +async def test_solve(gateway: Gateway, conda_forge_channel: Channel) -> None: + solved_data = await solve( + [conda_forge_channel], + ["linux-64"], + ["python", "sqlite"], + gateway, ) assert isinstance(solved_data, list) @@ -31,31 +20,17 @@ def test_solve(): assert len(solved_data) == 19 -def test_solve_channel_priority_disabled(): - cf_chan = Channel("conda-forge") - data_dir = os.path.join(os.path.dirname(__file__), "../../../test-data/") - cf_path = os.path.join(data_dir, "channels/conda-forge/linux-64/repodata.json") - cf_data = SparseRepoData( - channel=cf_chan, - subdir="linux-64", - path=cf_path, - ) - - pytorch_chan = Channel("pytorch") - pytorch_path = os.path.join(data_dir, "channels/pytorch/linux-64/repodata.json") - pytorch_data = SparseRepoData( - channel=pytorch_chan, - subdir="linux-64", - path=pytorch_path, - ) - - solved_data = solve( - [MatchSpec("pytorch-cpu=0.4.1=py36_cpu_1")], - [cf_data, pytorch_data], +@pytest.mark.asyncio +async def test_solve_channel_priority_disabled(gateway: Gateway, pytorch_channel: Channel, conda_forge_channel: Channel) -> None: + solved_data = await solve( + [conda_forge_channel, pytorch_channel], + ["linux-64"], + ["pytorch-cpu=0.4.1=py36_cpu_1"], + gateway, channel_priority=ChannelPriority.Disabled, ) assert isinstance(solved_data, list) assert isinstance(solved_data[0], RepoDataRecord) - assert list(filter(lambda r: r.file_name.startswith("pytorch-cpu-0.4.1-py36_cpu_1"), solved_data))[0].channel == "https://conda.anaconda.org/pytorch/" + assert list(filter(lambda r: r.file_name.startswith("pytorch-cpu-0.4.1-py36_cpu_1"), solved_data))[0].channel == pytorch_channel.base_url assert len(solved_data) == 32 diff --git a/test-data/channels/conda-forge/linux-64/repodata.json b/test-data/channels/conda-forge/linux-64/repodata.json index 966e55547..15baf460b 100644 --- a/test-data/channels/conda-forge/linux-64/repodata.json +++ 
b/test-data/channels/conda-forge/linux-64/repodata.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7cc0553f5f68b30bbb831a03cb0a76cb5d40cd129e073e551e27c8373ee83f94 -size 205364821 +oid sha256:69822802dd157f97607ce07870f5645489fdcf29857c968020c57674b7b0e87e +size 205364889 diff --git a/test-data/channels/conda-forge/noarch/repodata.json b/test-data/channels/conda-forge/noarch/repodata.json index 051ae233b..1aa81b4f7 100644 --- a/test-data/channels/conda-forge/noarch/repodata.json +++ b/test-data/channels/conda-forge/noarch/repodata.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:05e0c4ce7be29f36949c33cce782f21aecfbdd41f9e3423839670fb38fc5d691 -size 51649914 +oid sha256:58c9b7ebea25cdbc1e2c67804f14c605a868adaef816e33717db98e328c5847e +size 51649980 diff --git a/test-data/channels/conda-forge/noarch/repodata.json.gz b/test-data/channels/conda-forge/noarch/repodata.json.gz index 958616456..d6c2fad8f 100644 Binary files a/test-data/channels/conda-forge/noarch/repodata.json.gz and b/test-data/channels/conda-forge/noarch/repodata.json.gz differ
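Taken together, these changes add a configurable repodata gateway to py-rattler. A minimal sketch of the per-channel configuration surface introduced above, assuming the mirror URL and spec are placeholders and that `cache_action` takes one of the four literals defined in `gateway.py`:

```python
import asyncio

from rattler import Gateway, SourceConfig

# Keep the defaults for most channels, but disable JLAP patching and force
# cache-only lookups for one specific channel (illustrative URL).
gateway = Gateway(
    default_config=SourceConfig(),
    per_channel_config={
        "https://conda.anaconda.org/conda-forge": SourceConfig(
            jlap_enabled=False,
            cache_action="force-cache-only",
        ),
    },
    max_concurrent_requests=50,
)

# query() returns one list of records per requested channel, in order.
records = asyncio.run(
    gateway.query(["conda-forge"], ["linux-64", "noarch"], ["xtensor"], recursive=False)
)
print([len(per_channel) for per_channel in records])
```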