diff --git a/README.md b/README.md
index 5be3e144d..00b4b94ae 100644
--- a/README.md
+++ b/README.md
@@ -51,9 +51,12 @@ rustup target add wasm32-wasip1
 # The compiled packages will be at `hyperdrive/target/packages.zip`.
 # The compiled binary will be at `hyperdrive/target/debug/hyperdrive`.
 # OPTIONAL: --release flag (slower build; faster runtime; binary at `hyperdrive/target/release/hyperdrive`).
+# OPTIONAL: --parallel flag to build packages in parallel (faster but uses more resources).
 
 cd hyperdrive
 cargo run -p build-packages
+# OR for parallel builds:
+# cargo run -p build-packages -- --parallel
 # OPTIONAL: --release flag
 cargo build -p hyperdrive
 ```
diff --git a/scripts/build-packages/Cargo.toml b/scripts/build-packages/Cargo.toml
index 6eb721a09..a6c822e65 100644
--- a/scripts/build-packages/Cargo.toml
+++ b/scripts/build-packages/Cargo.toml
@@ -8,6 +8,7 @@ anyhow = "1.0.71"
 clap = "4"
 fs-err = "2.11"
 kit = { git = "https://github.com/hyperware-ai/kit", rev = "275f02c" }
+rayon = "1.8"
 serde = "1"
 serde_json = "1"
 tokio = "1.28"
diff --git a/scripts/build-packages/src/main.rs b/scripts/build-packages/src/main.rs
index ce8a9acb9..d55176f88 100644
--- a/scripts/build-packages/src/main.rs
+++ b/scripts/build-packages/src/main.rs
@@ -6,6 +6,7 @@ use std::{
 
 use clap::{Arg, Command};
 use fs_err as fs;
+use rayon::prelude::*;
 use zip::write::FileOptions;
 
 #[derive(serde::Deserialize, serde::Serialize)]
@@ -157,59 +158,77 @@ fn main() -> anyhow::Result<()> {
         })
         .collect();
 
-    let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = fs::read_dir(&packages_dir)?
-        .filter_map(|entry| {
-            let entry_path = match entry {
-                Ok(e) => e.path(),
-                Err(_) => return None,
-            };
-            let child_pkg_path = entry_path.join("pkg");
-            if !child_pkg_path.exists() {
-                // don't run on, e.g., `.DS_Store`
-                return None;
-            }
-            let (local_dependency_array, is_hyperapp, package_specific_features) =
-                if let Some(filename) = entry_path.file_name() {
-                    if let Some(maybe_params) =
-                        build_parameters.remove(&filename.to_string_lossy().to_string())
-                    {
-                        (
-                            maybe_params.local_dependencies.unwrap_or_default(),
-                            maybe_params.is_hyperapp.unwrap_or_default(),
-                            maybe_params.features.unwrap_or_default(),
-                        )
-                    } else {
-                        (vec![], false, vec![])
-                    }
-                } else {
-                    (vec![], false, vec![])
-                };
-            let package_specific_features = if package_specific_features.is_empty() {
-                features.clone()
-            } else if package_specific_features.contains(&"caller-utils".to_string()) {
-                // build without caller-utils flag, which will fail but will
-                // also create caller-utils crate (required for succeeding build)
-                let _ = build_and_zip_package(
-                    entry_path.clone(),
-                    child_pkg_path.to_str().unwrap(),
-                    skip_frontend,
-                    &features,
-                    local_dependency_array.clone(),
-                    is_hyperapp,
-                );
-                format!("{features},{}", package_specific_features.join(","))
-            } else {
-                format!("{features},{}", package_specific_features.join(","))
-            };
-            Some(build_and_zip_package(
-                entry_path.clone(),
-                child_pkg_path.to_str().unwrap(),
-                skip_frontend,
-                &package_specific_features,
-                local_dependency_array,
-                is_hyperapp,
-            ))
-        })
-        .collect();
+    // First, collect all package info sequentially (since we're mutating build_parameters)
+    let package_build_info: Vec<(PathBuf, String, String, Vec<PathBuf>, bool)> =
+        fs::read_dir(&packages_dir)?
+            .filter_map(|entry| {
+                let entry_path = match entry {
+                    Ok(e) => e.path(),
+                    Err(_) => return None,
+                };
+                let child_pkg_path = entry_path.join("pkg");
+                if !child_pkg_path.exists() {
+                    // don't run on, e.g., `.DS_Store`
+                    return None;
+                }
+                let (local_dependency_array, is_hyperapp, package_specific_features) =
+                    if let Some(filename) = entry_path.file_name() {
+                        if let Some(maybe_params) =
+                            build_parameters.remove(&filename.to_string_lossy().to_string())
+                        {
+                            (
+                                maybe_params.local_dependencies.unwrap_or_default(),
+                                maybe_params.is_hyperapp.unwrap_or_default(),
+                                maybe_params.features.unwrap_or_default(),
+                            )
+                        } else {
+                            (vec![], false, vec![])
+                        }
+                    } else {
+                        (vec![], false, vec![])
+                    };
+                let package_specific_features = if package_specific_features.is_empty() {
+                    features.clone()
+                } else if package_specific_features.contains(&"caller-utils".to_string()) {
+                    // build without caller-utils flag, which will fail but will
+                    // also create caller-utils crate (required for succeeding build)
+                    let _ = build_and_zip_package(
+                        entry_path.clone(),
+                        child_pkg_path.to_str().unwrap(),
+                        skip_frontend,
+                        &features,
+                        local_dependency_array.clone(),
+                        is_hyperapp,
+                    );
+                    format!("{features},{}", package_specific_features.join(","))
+                } else {
+                    format!("{features},{}", package_specific_features.join(","))
+                };
+                Some((
+                    entry_path,
+                    child_pkg_path.to_string_lossy().to_string(),
+                    package_specific_features,
+                    local_dependency_array,
+                    is_hyperapp,
+                ))
+            })
+            .collect();
+
+    // Build in parallel
+    let results: Vec<anyhow::Result<(String, String, Vec<u8>)>> = package_build_info
+        .into_par_iter()
+        .map(
+            |(entry_path, child_pkg_path, package_features, local_deps, is_hyperapp)| {
+                build_and_zip_package(
+                    entry_path,
+                    &child_pkg_path,
+                    skip_frontend,
+                    &package_features,
+                    local_deps,
+                    is_hyperapp,
+                )
+            },
+        )
+        .collect();
 
     let mut file_to_metadata = std::collections::HashMap::new();
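
Note on the pattern (not part of the patch): the change splits the work into a sequential phase that gathers per-package build inputs (that closure calls `build_parameters.remove(...)`, so it cannot run concurrently) and a parallel phase that runs the expensive builds through rayon's `into_par_iter()`, collecting one `Result` per package just as the sequential version did. Below is a minimal, self-contained sketch of that two-phase shape; `BuildInput` and `build_one` are invented stand-ins, not hyperdrive's actual API, and the package names/features are placeholders.

```rust
// Illustrative sketch only. Assumes `rayon` and `anyhow` as dependencies.
use rayon::prelude::*;

struct BuildInput {
    name: String,
    features: String,
}

// Stand-in for the expensive per-package step (compile + zip).
fn build_one(input: &BuildInput) -> anyhow::Result<(String, Vec<u8>)> {
    // ... in the real script this would run the build with `input.features`
    // and zip the package directory; here we just fabricate an empty archive.
    let _features = &input.features;
    Ok((format!("{}.zip", input.name), Vec::new()))
}

fn main() -> anyhow::Result<()> {
    // Phase 1: sequential. Safe to mutate shared maps/params while gathering inputs.
    let inputs: Vec<BuildInput> = vec![
        BuildInput { name: "app-store".into(), features: "simulation-mode".into() },
        BuildInput { name: "chess".into(), features: String::new() },
    ];

    // Phase 2: parallel. `into_par_iter` fans the builds out over rayon's thread pool;
    // each package yields its own Result, so one failure doesn't mask the others.
    let results: Vec<anyhow::Result<(String, Vec<u8>)>> = inputs
        .into_par_iter()
        .map(|input| build_one(&input))
        .collect();

    for result in results {
        let (zip_name, _zip_bytes) = result?;
        println!("built {zip_name}");
    }
    Ok(())
}
```

Collecting into `Vec<anyhow::Result<...>>` rather than short-circuiting into a single `Result<Vec<...>>` keeps every package's outcome available to the code that follows, preserving the error-handling shape of the original sequential version.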