From 4e6e6ab10da2dcf6d0b41e6d9c24faaa59c44ced Mon Sep 17 00:00:00 2001 From: Sean Klein Date: Thu, 13 Oct 2022 10:01:37 -0700 Subject: [PATCH 1/3] Restructure package format, add Composite Packages This PR re-structures the format of the "package" structure, which also has implications for package-manifest.toml files. What did we change, and why? - Unified internal/external packages: There is information about packages which is shared between internal and external packages, so they've been pulled into the same structure. The "input" for a package is now described by "PackageSource", which can be one of "local" (you're building the package), "prebuilt" (you're downloading the package), "composite" (you're combining packages") or "manual" (you're supplying the package yourself). - The boolean "zone" flag has been updated to an enum, called "PackageOutput". This enum describes the different outputs that one should expect from a package. - The "Target" evaluation information has been moved to this package from the primary Omicron repository. This makes it easier to act on the 'only_for_target' and 'intermediate_only' fields in a useful way. --- src/blob.rs | 98 +++++++ src/config.rs | 55 ++-- src/lib.rs | 3 + src/package.rs | 512 ++++++++++++++++++--------------- src/progress.rs | 24 ++ src/target.rs | 71 +++++ tests/mod.rs | 66 ++++- tests/service-a/cfg.toml | 14 +- tests/service-b/cfg.toml | 14 +- tests/service-c/cfg.toml | 7 +- tests/service-d/cfg.toml | 7 +- tests/service-e/cfg.toml | 21 ++ tests/service-e/pkg-1-file.txt | 0 tests/service-e/pkg-2-file.txt | 0 14 files changed, 608 insertions(+), 284 deletions(-) create mode 100644 src/blob.rs create mode 100644 src/progress.rs create mode 100644 src/target.rs create mode 100644 tests/service-e/cfg.toml create mode 100644 tests/service-e/pkg-1-file.txt create mode 100644 tests/service-e/pkg-2-file.txt diff --git a/src/blob.rs b/src/blob.rs new file mode 100644 index 0000000..ad14e26 --- /dev/null +++ b/src/blob.rs @@ -0,0 +1,98 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. + +//! Tools for downloading blobs + +use anyhow::{anyhow, bail, Result}; +use chrono::{DateTime, FixedOffset, Utc}; +use reqwest::header::{CONTENT_LENGTH, LAST_MODIFIED}; +use std::path::Path; +use std::str::FromStr; +use tokio::io::AsyncWriteExt; + +// Path to the blob S3 Bucket. +const S3_BUCKET: &str = "https://oxide-omicron-build.s3.amazonaws.com"; +// Name for the directory component where downloaded blobs are stored. +pub(crate) const BLOB: &str = "blob"; + +// Downloads "source" from S3_BUCKET to "destination". +pub async fn download(source: &str, destination: &Path) -> Result<()> { + let url = format!("{}/{}", S3_BUCKET, source); + let client = reqwest::Client::new(); + + if destination.exists() { + // If destination exists, check against size and last modified time. If + // both are the same, then return Ok + let head_response = client.head(&url).send().await?; + if !head_response.status().is_success() { + bail!("head failed! 
{:?}", head_response); + } + + let headers = head_response.headers(); + + // From S3, header looks like: + // + // "Content-Length: 49283072" + let content_length = headers + .get(CONTENT_LENGTH) + .ok_or_else(|| anyhow!("no content length on {} HEAD response!", url))?; + let content_length: u64 = u64::from_str(content_length.to_str()?)?; + + // From S3, header looks like: + // + // "Last-Modified: Fri, 27 May 2022 20:50:17 GMT" + let last_modified = headers + .get(LAST_MODIFIED) + .ok_or_else(|| anyhow!("no last modified on {} HEAD response!", url))?; + let last_modified: DateTime = + chrono::DateTime::parse_from_rfc2822(last_modified.to_str()?)?; + let metadata = tokio::fs::metadata(&destination).await?; + let metadata_modified: DateTime = metadata.modified()?.into(); + + if metadata.len() == content_length && metadata_modified == last_modified { + return Ok(()); + } + } + + println!( + "Downloading {} to {}", + source, + destination.to_string_lossy() + ); + + let response = client.get(url).send().await?; + + // Store modified time from HTTPS response + let last_modified = response + .headers() + .get(LAST_MODIFIED) + .ok_or_else(|| anyhow!("no last modified on GET response!"))?; + let last_modified: DateTime = + chrono::DateTime::parse_from_rfc2822(last_modified.to_str()?)?; + + // Write file bytes to destination + let mut file = tokio::fs::File::create(destination).await?; + file.write_all(&response.bytes().await?).await?; + drop(file); + + // Set destination file's modified time based on HTTPS response + filetime::set_file_mtime( + destination, + filetime::FileTime::from_system_time(last_modified.into()), + )?; + + Ok(()) +} + +#[test] +fn test_converts() { + let content_length = "1966080"; + let last_modified = "Fri, 30 Apr 2021 22:37:39 GMT"; + + let content_length: u64 = u64::from_str(content_length).unwrap(); + assert_eq!(1966080, content_length); + + let _last_modified: DateTime = + chrono::DateTime::parse_from_rfc2822(last_modified).unwrap(); +} diff --git a/src/config.rs b/src/config.rs index c1ac876..fe111be 100644 --- a/src/config.rs +++ b/src/config.rs @@ -4,49 +4,42 @@ //! Configuration for a package. -use crate::package::Package; +use crate::package::{Package, PackageOutput}; +use crate::target::Target; use serde_derive::Deserialize; use std::collections::BTreeMap; use std::path::Path; use thiserror::Error; -/// Describes the origin of an externally-built package. -#[derive(Deserialize, Debug)] -#[serde(tag = "type", rename_all = "lowercase")] -pub enum ExternalPackageSource { - /// Downloads the package from the following URL: - /// - /// - Prebuilt { - repo: String, - commit: String, - sha256: String, - }, - /// Expects that a package will be manually built and placed into the output - /// directory. - Manual, -} - -/// Describes a package which originates from outside this repo. -#[derive(Deserialize, Debug)] -pub struct ExternalPackage { - #[serde(flatten)] - pub package: Package, - - pub source: ExternalPackageSource, -} - /// Describes the configuration for a set of packages. #[derive(Deserialize, Debug)] pub struct Config { /// Packages to be built and installed. #[serde(default, rename = "package")] pub packages: BTreeMap, +} - /// Packages to be installed, but which have been created outside this - /// repository. - #[serde(default, rename = "external_package")] - pub external_packages: BTreeMap, +impl Config { + /// Returns target packages to be assembled on the builder machine. 
+ pub fn packages_to_build(&self, target: &Target) -> BTreeMap<&String, &Package> { + self.packages + .iter() + .filter(|(_, pkg)| target.includes_package(&pkg)) + .map(|(name, pkg)| (name, pkg)) + .collect() + } + + /// Returns target packages which should execute on the deployment machine. + pub fn packages_to_deploy(&self, target: &Target) -> BTreeMap<&String, &Package> { + let all_packages = self.packages_to_build(target); + all_packages + .into_iter() + .filter(|(_, pkg)| match pkg.output { + PackageOutput::Zone { intermediate_only } => !intermediate_only, + PackageOutput::Tarball => true, + }) + .collect() + } } /// Errors which may be returned when parsing the server configuration. diff --git a/src/lib.rs b/src/lib.rs index de3850c..6192931 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -2,5 +2,8 @@ // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at https://mozilla.org/MPL/2.0/. +pub mod blob; pub mod config; pub mod package; +pub mod progress; +pub mod target; diff --git a/src/package.rs b/src/package.rs index 8c27bb0..91246a6 100644 --- a/src/package.rs +++ b/src/package.rs @@ -4,114 +4,34 @@ //! Utility for bundling target binaries as tarfiles. -use anyhow::{anyhow, bail, Result}; -use chrono::{DateTime, FixedOffset, Utc}; -use reqwest::header::{CONTENT_LENGTH, LAST_MODIFIED}; +use crate::blob::BLOB; +use crate::progress::{NoProgress, Progress}; +use anyhow::{anyhow, Context, Result}; use serde_derive::Deserialize; -use std::borrow::Cow; use std::collections::BTreeMap; use std::convert::TryInto; use std::fs::{File, OpenOptions}; use std::path::{Path, PathBuf}; -use std::str::FromStr; use tar::Builder; use tokio::io::{AsyncSeekExt, AsyncWriteExt}; -// Path to the blob S3 Bucket. -const S3_BUCKET: &str = "https://oxide-omicron-build.s3.amazonaws.com"; -// Name for the directory component where downloaded blobs are stored. -const BLOB: &str = "blob"; - -#[test] -fn test_converts() { - let content_length = "1966080"; - let last_modified = "Fri, 30 Apr 2021 22:37:39 GMT"; - - let content_length: u64 = u64::from_str(content_length).unwrap(); - assert_eq!(1966080, content_length); - - let _last_modified: DateTime = - chrono::DateTime::parse_from_rfc2822(last_modified).unwrap(); -} - -// Downloads "source" from S3_BUCKET to "destination". -pub async fn download(source: &str, destination: &Path) -> Result<()> { - let url = format!("{}/{}", S3_BUCKET, source); - let client = reqwest::Client::new(); - - if destination.exists() { - // If destination exists, check against size and last modified time. If - // both are the same, then return Ok - let head_response = client.head(&url).send().await?; - if !head_response.status().is_success() { - bail!("head failed! 
{:?}", head_response); - } - - let headers = head_response.headers(); - - // From S3, header looks like: - // - // "Content-Length: 49283072" - let content_length = headers - .get(CONTENT_LENGTH) - .ok_or_else(|| anyhow!("no content length on {} HEAD response!", url))?; - let content_length: u64 = u64::from_str(content_length.to_str()?)?; - - // From S3, header looks like: - // - // "Last-Modified: Fri, 27 May 2022 20:50:17 GMT" - let last_modified = headers - .get(LAST_MODIFIED) - .ok_or_else(|| anyhow!("no last modified on {} HEAD response!", url))?; - let last_modified: DateTime = - chrono::DateTime::parse_from_rfc2822(last_modified.to_str()?)?; - let metadata = tokio::fs::metadata(&destination).await?; - let metadata_modified: DateTime = metadata.modified()?.into(); - - if metadata.len() == content_length && metadata_modified == last_modified { - return Ok(()); - } - } - - println!( - "Downloading {} to {}", - source, - destination.to_string_lossy() - ); - - let response = client.get(url).send().await?; - - // Store modified time from HTTPS response - let last_modified = response - .headers() - .get(LAST_MODIFIED) - .ok_or_else(|| anyhow!("no last modified on GET response!"))?; - let last_modified: DateTime = - chrono::DateTime::parse_from_rfc2822(last_modified.to_str()?)?; - - // Write file bytes to destination - let mut file = tokio::fs::File::create(destination).await?; - file.write_all(&response.bytes().await?).await?; - drop(file); - - // Set destination file's modified time based on HTTPS response - filetime::set_file_mtime( - destination, - filetime::FileTime::from_system_time(last_modified.into()), - )?; - - Ok(()) -} - // Helper to open a tarfile for reading/writing. -fn open_tarfile(tarfile: &Path) -> Result { +fn create_tarfile + std::fmt::Debug>(tarfile: P) -> Result { OpenOptions::new() .write(true) .read(true) .truncate(true) .create(true) - .open(&tarfile) - .map_err(|err| anyhow!("Cannot create tarfile: {}", err)) + .open(tarfile.as_ref()) + .map_err(|err| anyhow!("Cannot create tarfile {:?}: {}", tarfile, err)) +} + +// Helper to open a tarfile for reading. +fn open_tarfile + std::fmt::Debug>(tarfile: P) -> Result { + OpenOptions::new() + .read(true) + .open(tarfile.as_ref()) + .map_err(|err| anyhow!("Cannot open tarfile {:?}: {}", tarfile, err)) } // Returns the path as it should be placed within an archive, by @@ -150,6 +70,13 @@ fn add_directory_and_parents( let mut parents: Vec<&Path> = to.ancestors().collect::>(); parents.reverse(); + if to.is_relative() { + return Err(anyhow!( + "Cannot add 'to = {}'; absolute path required", + to.to_string_lossy() + )); + } + for parent in parents { let dst = archive_path(&parent)?; archive.append_dir(&dst, ".")?; @@ -158,21 +85,58 @@ fn add_directory_and_parents( Ok(()) } -/// Trait for propagating progress information while constructing the package. -pub trait Progress { - /// Updates the message displayed regarding progress constructing - /// the package. - fn set_message(&self, msg: impl Into>); +/// Describes the origin of an externally-built package. +#[derive(Deserialize, Debug)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum PackageSource { + /// Describes a package which should be assembled locally. + Local { + /// A list of blobs from the Omicron build S3 bucket which should be placed + /// within this package. + blobs: Option>, + + /// Configuration for packages containing Rust binaries. + rust: Option, + + /// A set of mapped paths which appear within the archive. 
+ #[serde(default)] + paths: Vec, + }, + + /// Downloads the package from the following URL: + /// + /// + Prebuilt { + repo: String, + commit: String, + sha256: String, + }, + + /// A composite package, created by merging multiple tarballs into one. + /// + /// Currently, this package can only merge zone images. + Composite { packages: Vec }, - /// Increments the number of things which have completed. - fn increment(&self, delta: u64); + /// Expects that a package will be manually built and placed into the output + /// directory. + Manual, } -/// Implements [`Progress`] as a no-op. -struct NoProgress; -impl Progress for NoProgress { - fn set_message(&self, _msg: impl Into>) {} - fn increment(&self, _delta: u64) {} +/// Describes the output format of the package. +#[derive(Deserialize, Debug, Clone)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum PackageOutput { + /// A complete zone image, ready to be deployed to the target. + Zone { + /// "true" if the package is only used to construct composite packages. + /// + /// This can be used to signal that the package should *not* be + /// installed by itself. + #[serde(default)] + intermediate_only: bool, + }, + /// A tarball, ready to be deployed to the target. + Tarball, } /// A single package. @@ -181,19 +145,14 @@ pub struct Package { /// The name of the service name to be used on the target OS. pub service_name: String, - /// A list of blobs from the Omicron build S3 bucket which should be placed - /// within this package. - pub blobs: Option>, - - /// Configuration for packages containing Rust binaries. - pub rust: Option, - - /// A set of mapped paths which appear within the archive. - #[serde(default)] - pub paths: Vec, + /// Identifies from where the package originates. + /// + /// For example, do we need to assemble it ourselves, or pull it from + /// somewhere else? + pub source: PackageSource, - /// Identifies if the package should be packaged into a zone image. - pub zone: bool, + /// Identifies what the output of the package should be. + pub output: PackageOutput, /// Identifies the targets for which the package should be included. /// @@ -205,12 +164,38 @@ pub struct Package { pub setup_hint: Option, } +async fn new_zone_archive_builder( + package_name: &str, + output_directory: &Path, +) -> Result>> { + let tarfile = output_directory.join(format!("{}.tar.gz", package_name)); + let file = create_tarfile(tarfile)?; + // TODO: Consider using async compression, async tar. + // It's not the *worst* thing in the world for a packaging tool to block + // here, but it would help the other async threads remain responsive if + // we avoided blocking. + let gzw = flate2::write::GzEncoder::new(file, flate2::Compression::fast()); + let mut archive = Builder::new(gzw); + archive.mode(tar::HeaderMode::Deterministic); + + // The first file in the archive must always be a JSON file + // which identifies the format of the rest of the archive. + // + // See the OMICRON1(5) man page for more detail. 
+ let mut root_json = tokio::fs::File::from_std(tempfile::tempfile()?); + let contents = r#"{"v":"1","t":"layer"}"#; + root_json.write_all(contents.as_bytes()).await?; + root_json.seek(std::io::SeekFrom::Start(0)).await?; + archive.append_file("oxide.json", &mut root_json.into_std().await)?; + + Ok(archive) +} + impl Package { pub fn get_output_path(&self, name: &str, output_directory: &Path) -> PathBuf { - if self.zone { - output_directory.join(format!("{}.tar.gz", name)) - } else { - output_directory.join(format!("{}.tar", name)) + match self.output { + PackageOutput::Zone { .. } => output_directory.join(format!("{}.tar.gz", name)), + PackageOutput::Tarball => output_directory.join(format!("{}.tar", name)), } } @@ -231,22 +216,30 @@ impl Package { // - 1 tick for each included path // - 1 tick for the rust binary // - 1 tick per blob - let progress_total = self - .paths - .iter() - .map(|path| { - walkdir::WalkDir::new(&path.from) - .follow_links(true) - .into_iter() - .count() - }) - .sum::() - + if self.rust.is_some() { 1 } else { 0 } - + if let Some(blobs) = &self.blobs { - blobs.len() - } else { - 0 - }; + let progress_total = match &self.source { + PackageSource::Local { blobs, rust, paths } => { + let blob_work = if let Some(blobs) = blobs { + blobs.len() + } else { + 0 + }; + + let rust_work = if rust.is_some() { 1 } else { 0 }; + + let paths_work = paths + .iter() + .map(|path| { + walkdir::WalkDir::new(&path.from) + .follow_links(true) + .into_iter() + .count() + }) + .sum::(); + + rust_work + blob_work + paths_work + } + _ => 1, + }; progress_total.try_into().unwrap() } @@ -267,27 +260,45 @@ impl Package { name: &str, output_directory: &Path, ) -> Result { - if self.zone { - self.create_zone_package(progress, name, output_directory) - .await - } else { - self.create_tarball_package(progress, name, output_directory) - .await + match self.output { + PackageOutput::Zone { .. } => { + self.create_zone_package(progress, name, output_directory) + .await + } + PackageOutput::Tarball => { + self.create_tarball_package(progress, name, output_directory) + .await + } } } + async fn add_paths( + &self, + progress: &impl Progress, + archive: &mut Builder, + paths: &Vec, + ) -> Result<()> { + tokio::task::block_in_place(move || self.add_paths_sync(progress, archive, paths))?; + Ok(()) + } + // Add mapped paths to the package. - async fn add_paths( + fn add_paths_sync( &self, progress: &impl Progress, archive: &mut Builder, + paths: &Vec, ) -> Result<()> { progress.set_message("adding paths"); - for path in &self.paths { - if self.zone { - // Zone images require all paths to have their parents before - // they may be unpacked. - add_directory_and_parents(archive, path.to.parent().unwrap())?; + + for path in paths { + match self.output { + PackageOutput::Zone { .. } => { + // Zone images require all paths to have their parents before + // they may be unpacked. + add_directory_and_parents(archive, path.to.parent().unwrap())?; + } + PackageOutput::Tarball => {} } if !path.from.exists() { // Strictly speaking, this check is redundant, but it provides @@ -314,18 +325,26 @@ impl Package { for entry in entries { let entry = entry?; let dst = &path.to.join(entry.path().strip_prefix(&from_root)?); - let dst = if self.zone { - // Zone images must explicitly label all destination paths - // as within "root/". - archive_path(dst)? - } else { - dst.to_path_buf() + + let dst = match self.output { + PackageOutput::Zone { .. 
} => { + // Zone images must explicitly label all destination paths + // as within "root/". + archive_path(dst)? + } + PackageOutput::Tarball => dst.to_path_buf(), }; if entry.file_type().is_dir() { archive.append_dir(&dst, ".")?; } else if entry.file_type().is_file() { - archive.append_path_with_name(entry.path(), &dst)?; + archive + .append_path_with_name(entry.path(), &dst) + .context(format!( + "Failed to add file '{}' to '{}'", + entry.path().display(), + dst.display() + ))?; } else { panic!( "Unsupported file type: {:?} for {:?}", @@ -339,6 +358,33 @@ impl Package { Ok(()) } + async fn add_rust( + &self, + progress: &impl Progress, + archive: &mut Builder, + ) -> Result<()> { + match &self.source { + PackageSource::Local { + rust: Some(rust_pkg), + .. + } => { + progress.set_message("adding rust binaries"); + let dst = match self.output { + PackageOutput::Zone { .. } => { + let dst = Path::new("/opt/oxide").join(&self.service_name).join("bin"); + add_directory_and_parents(archive, &dst)?; + archive_path(&dst)? + } + PackageOutput::Tarball => PathBuf::from(""), + }; + rust_pkg.add_binaries_to_archive(archive, &dst)?; + progress.increment(1); + } + _ => {} + } + Ok(()) + } + // Adds blobs from S3 to the package. // // - `progress`: Reports progress while adding blobs. @@ -354,15 +400,20 @@ impl Package { destination_path: &Path, ) -> Result<()> { progress.set_message("adding blobs"); - if let Some(blobs) = &self.blobs { - let blobs_path = download_directory.join(&self.service_name); - std::fs::create_dir_all(&blobs_path)?; - for blob in blobs { - let blob_path = blobs_path.join(blob); - download(&blob.to_string_lossy(), &blob_path).await?; - progress.increment(1); + match &self.source { + PackageSource::Local { + blobs: Some(blobs), .. + } => { + let blobs_path = download_directory.join(&self.service_name); + std::fs::create_dir_all(&blobs_path)?; + for blob in blobs { + let blob_path = blobs_path.join(blob); + crate::blob::download(&blob.to_string_lossy(), &blob_path).await?; + progress.increment(1); + } + archive.append_dir_all(&destination_path, &blobs_path)?; } - archive.append_dir_all(&destination_path, &blobs_path)?; + _ => (), } Ok(()) } @@ -373,52 +424,62 @@ impl Package { name: &str, output_directory: &Path, ) -> Result { - // Create a tarball which will become an Omicron-brand image - // archive. - let tarfile = self.get_output_path(name, output_directory); - let file = open_tarfile(&tarfile)?; - - // TODO: Consider using async compression, async tar. - // It's not the *worst* thing in the world for a packaging tool to block - // here, but it would help the other async threads remain responsive if - // we avoided blocking. - let gzw = flate2::write::GzEncoder::new(file, flate2::Compression::fast()); - let mut archive = Builder::new(gzw); - archive.mode(tar::HeaderMode::Deterministic); - - // The first file in the archive must always be a JSON file - // which identifies the format of the rest of the archive. - // - // See the OMICRON1(5) man page for more detail. - let mut root_json = tokio::fs::File::from_std(tempfile::tempfile()?); - let contents = r#"{"v":"1","t":"layer"}"#; - root_json.write_all(contents.as_bytes()).await?; - root_json.seek(std::io::SeekFrom::Start(0)).await?; - archive.append_file("oxide.json", &mut root_json.into_std().await)?; - - // Add mapped paths. - self.add_paths(progress, &mut archive).await?; - - // Attempt to add the rust binary, if one was built. 
- progress.set_message("adding rust binaries"); - if let Some(rust_pkg) = &self.rust { - let dst = Path::new("/opt/oxide").join(&self.service_name).join("bin"); - add_directory_and_parents(&mut archive, &dst)?; - let dst = archive_path(&dst)?; - rust_pkg.add_binaries_to_archive(&mut archive, &dst)?; - progress.increment(1); + let mut archive = new_zone_archive_builder(name, output_directory).await?; + + match &self.source { + PackageSource::Local { paths, .. } => { + // Add mapped paths. + self.add_paths(progress, &mut archive, &paths).await?; + + // Attempt to add the rust binary, if one was built. + self.add_rust(progress, &mut archive).await?; + + // Add (and possibly download) blobs + let blob_dst = Path::new("/opt/oxide").join(&self.service_name).join(BLOB); + self.add_blobs( + progress, + &mut archive, + output_directory, + &archive_path(&blob_dst)?, + ) + .await?; + } + PackageSource::Composite { packages } => { + // For each of the component packages, open the tarfile, and add + // it to our top-level archive. + let tmp = tempfile::tempdir()?; + for component_package in packages { + let component_path = output_directory.join(component_package); + let gzr = flate2::read::GzDecoder::new(open_tarfile(component_path)?); + let mut component_reader = tar::Archive::new(gzr); + let entries = component_reader.entries()?; + + // First, unpack the existing entries + for entry in entries { + let mut entry = entry?; + + // Ignore the JSON header files + if entry.path()? == Path::new("oxide.json") { + continue; + } + + let entry_unpack_path = + tmp.path().join(entry.path()?.strip_prefix("root/")?); + entry.unpack(&entry_unpack_path)?; + assert!(entry_unpack_path.exists()); + + archive.append_path_with_name(entry_unpack_path, entry.path()?)?; + } + } + } + _ => { + return Err(anyhow!( + "Cannot create a zone package with source: {:?}", + self.source + )); + } } - // Add (and possibly download) blobs - let blob_dst = Path::new("/opt/oxide").join(&self.service_name).join(BLOB); - self.add_blobs( - progress, - &mut archive, - output_directory, - &archive_path(&blob_dst)?, - ) - .await?; - let file = archive .into_inner() .map_err(|err| anyhow!("Failed to finalize archive: {}", err))?; @@ -435,30 +496,29 @@ impl Package { // Create a tarball containing the necessary executable and auxiliary // files. let tarfile = self.get_output_path(name, output_directory); - let file = open_tarfile(&tarfile)?; + let file = create_tarfile(&tarfile)?; // TODO: We could add compression here, if we'd like? let mut archive = Builder::new(file); archive.mode(tar::HeaderMode::Deterministic); - // Add mapped paths. - self.add_paths(progress, &mut archive).await?; - - // Attempt to add the rust binary, if one was built. - progress.set_message("adding rust binaries"); - if let Some(rust_pkg) = &self.rust { - rust_pkg.add_binaries_to_archive(&mut archive, Path::new(""))?; - progress.increment(1); - } + match &self.source { + PackageSource::Local { paths, .. } => { + // Add mapped paths. + self.add_paths(progress, &mut archive, paths).await?; - // Add (and possibly download) blobs - self.add_blobs(progress, &mut archive, output_directory, &Path::new(BLOB)) - .await?; + // Attempt to add the rust binary, if one was built. 
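+ // (For tarball output the binaries are placed at the root of the
+ // archive; zone images place them under /opt/oxide/<service>/bin.)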
+ self.add_rust(progress, &mut archive).await?; - let file = archive - .into_inner() - .map_err(|err| anyhow!("Failed to finalize archive: {}", err))?; + // Add (and possibly download) blobs + self.add_blobs(progress, &mut archive, output_directory, &Path::new(BLOB)) + .await?; - Ok(file) + Ok(archive + .into_inner() + .map_err(|err| anyhow!("Failed to finalize archive: {}", err))?) + } + _ => return Err(anyhow!("Cannot create non-local tarball")), + } } } diff --git a/src/progress.rs b/src/progress.rs new file mode 100644 index 0000000..0c71316 --- /dev/null +++ b/src/progress.rs @@ -0,0 +1,24 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. + +//! Describes utilities for relaying progress to end-users. + +use std::borrow::Cow; + +/// Trait for propagating progress information while constructing the package. +pub trait Progress { + /// Updates the message displayed regarding progress constructing + /// the package. + fn set_message(&self, msg: impl Into>); + + /// Increments the number of things which have completed. + fn increment(&self, delta: u64); +} + +/// Implements [`Progress`] as a no-op. +pub struct NoProgress; +impl Progress for NoProgress { + fn set_message(&self, _msg: impl Into>) {} + fn increment(&self, _delta: u64) {} +} diff --git a/src/target.rs b/src/target.rs new file mode 100644 index 0000000..1b78ff3 --- /dev/null +++ b/src/target.rs @@ -0,0 +1,71 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at https://mozilla.org/MPL/2.0/. + +use crate::package::Package; +use std::collections::BTreeMap; + +/// A target describes what platform and configuration we're trying +/// to deploy on. +#[derive(Clone, Debug)] +pub struct Target(pub BTreeMap); + +impl Target { + // Returns true if this target should include the package. + pub(crate) fn includes_package(&self, pkg: &Package) -> bool { + let valid_targets = if let Some(targets) = &pkg.only_for_targets { + // If targets are specified for the packages, filter them. + targets + } else { + // If no targets are specified, assume the package should be + // included by default. + return true; + }; + + // For each of the targets permitted by the package, check if + // the current target matches. 
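+ //
+ // All restrictions must match. For example, a package restricted to
+ // "a = 1" is included by a target of "a=1 b=2", but excluded by "a=2"
+ // or by a target that does not define "a" at all.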
+ for (k, v) in valid_targets { + let target_value = if let Some(target_value) = self.0.get(k) { + target_value + } else { + return false; + }; + + if target_value != v { + return false; + }; + } + return true; + } +} + +impl std::fmt::Display for Target { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + for (key, value) in &self.0 { + write!(f, "{}={} ", key, value)?; + } + Ok(()) + } +} + +#[derive(thiserror::Error, Debug)] +pub enum TargetParseError { + #[error("Cannot parse key-value pair out of '{0}'")] + MissingEquals(String), +} + +impl std::str::FromStr for Target { + type Err = TargetParseError; + + fn from_str(s: &str) -> Result { + let kvs = s + .split_whitespace() + .map(|kv| { + kv.split_once('=') + .ok_or_else(|| TargetParseError::MissingEquals(kv.to_string())) + .map(|(k, v)| (k.to_string(), v.to_string())) + }) + .collect::, _>>()?; + Ok(Target(kvs)) + } +} diff --git a/tests/mod.rs b/tests/mod.rs index f16f63c..442087c 100644 --- a/tests/mod.rs +++ b/tests/mod.rs @@ -11,7 +11,7 @@ mod test { use std::path::{Path, PathBuf}; use tar::Archive; - use omicron_zone_package::package::download; + use omicron_zone_package::blob::download; fn get_next<'a, R: 'a + Read>(entries: &mut tar::Entries<'a, R>) -> PathBuf { entries @@ -24,7 +24,7 @@ mod test { } // Tests a package of arbitrary files is being placed into a Zone image - #[tokio::test] + #[tokio::test(flavor = "multi_thread")] async fn test_package_as_zone() { // Parse the configuration let cfg = config::parse("tests/service-a/cfg.toml").unwrap(); @@ -62,7 +62,7 @@ mod test { } // Tests a rust package being placed into a Zone image - #[tokio::test] + #[tokio::test(flavor = "multi_thread")] async fn test_rust_package_as_zone() { // Parse the configuration let cfg = config::parse("tests/service-b/cfg.toml").unwrap(); @@ -107,7 +107,7 @@ mod test { // // This is used for building packages that exist in the Global Zone, // and don't need (nor want) to be packaged into a full Zone image. - #[tokio::test] + #[tokio::test(flavor = "multi_thread")] async fn test_rust_package_as_tarball() { // Parse the configuration let cfg = config::parse("tests/service-c/cfg.toml").unwrap(); @@ -130,7 +130,7 @@ mod test { // Although package and service names are often the same, they do // not *need* to be the same. This is an example of them both // being explicitly different. - #[tokio::test] + #[tokio::test(flavor = "multi_thread")] async fn test_rust_package_with_disinct_service_name() { // Parse the configuration let cfg = config::parse("tests/service-d/cfg.toml").unwrap(); @@ -138,6 +138,8 @@ mod test { let service_name = "my-service"; let package = cfg.packages.get(package_name).unwrap(); + assert_eq!(package.service_name, service_name); + // Create the packaged file let out = tempfile::tempdir().unwrap(); package.create(package_name, out.path()).await.unwrap(); @@ -151,7 +153,59 @@ mod test { assert!(ents.next().is_none()); } - #[tokio::test] + #[tokio::test(flavor = "multi_thread")] + async fn test_composite_package() { + // Parse the configuration + let cfg = config::parse("tests/service-e/cfg.toml").unwrap(); + let out = tempfile::tempdir().unwrap(); + + // Build the dependencies first. 
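+ // Both are "intermediate_only" zone images; the composite package
+ // "pkg-3" merges their contents into a single deployable image.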
+ let package_dependencies = ["pkg-1", "pkg-2"]; + for package_name in package_dependencies { + let package = cfg.packages.get(package_name).unwrap(); + // Create the packaged file + package.create(package_name, out.path()).await.unwrap(); + } + + // Build the composite package + let package_name = "pkg-3"; + let package = cfg.packages.get(package_name).unwrap(); + package.create(package_name, out.path()).await.unwrap(); + + // Verify the contents + let path = package.get_output_path(package_name, &out.path()); + assert!(path.exists()); + let gzr = flate2::read::GzDecoder::new(File::open(path).unwrap()); + let mut archive = Archive::new(gzr); + let mut ents = archive.entries().unwrap(); + assert_eq!(Path::new("oxide.json"), get_next(&mut ents)); + assert_eq!(Path::new("root/"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt/oxide"), get_next(&mut ents)); + assert_eq!( + Path::new("root/opt/oxide/pkg-1-file.txt"), + get_next(&mut ents) + ); + assert_eq!(Path::new("root/"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt/oxide"), get_next(&mut ents)); + assert_eq!( + Path::new("root/opt/oxide/pkg-2-file.txt"), + get_next(&mut ents) + ); + assert_eq!(Path::new("root/"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt/oxide"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt/oxide/svc-2"), get_next(&mut ents)); + assert_eq!(Path::new("root/opt/oxide/svc-2/bin"), get_next(&mut ents)); + assert_eq!( + Path::new("root/opt/oxide/svc-2/bin/test-service"), + get_next(&mut ents) + ); + assert!(ents.next().is_none()); + } + + #[tokio::test(flavor = "multi_thread")] async fn test_download() -> Result<()> { let out = tempfile::tempdir()?; diff --git a/tests/service-a/cfg.toml b/tests/service-a/cfg.toml index 07967a3..3e309ec 100644 --- a/tests/service-a/cfg.toml +++ b/tests/service-a/cfg.toml @@ -1,10 +1,8 @@ [package.my-service] service_name = "my-service" -zone = true - -[[package.my-service.paths]] -from = "tests/service-a/subdirectory" -to = "/opt/oxide/my-service" -[[package.my-service.paths]] -from = "tests/service-a/single-file.txt" -to = "/opt/oxide/my-service/single-file.txt" +source.type = "local" +source.paths = [ + { from = "tests/service-a/subdirectory", to = "/opt/oxide/my-service" }, + { from = "tests/service-a/single-file.txt", to = "/opt/oxide/my-service/single-file.txt" } +] +output.type = "zone" diff --git a/tests/service-b/cfg.toml b/tests/service-b/cfg.toml index 06690b1..7c17bbb 100644 --- a/tests/service-b/cfg.toml +++ b/tests/service-b/cfg.toml @@ -1,9 +1,9 @@ [package.my-service] -rust.binary_names = ["test-service"] -rust.release = false service_name = "my-service" -zone = true - -[[package.my-service.paths]] -from = "tests/service-b/subdirectory" -to = "/opt/oxide/my-service" +source.type = "local" +source.rust.binary_names = ["test-service"] +source.rust.release = false +source.paths = [ + { from = "tests/service-b/subdirectory", to = "/opt/oxide/my-service" } +] +output.type = "zone" diff --git a/tests/service-c/cfg.toml b/tests/service-c/cfg.toml index 009238f..b1919cc 100644 --- a/tests/service-c/cfg.toml +++ b/tests/service-c/cfg.toml @@ -1,5 +1,6 @@ [package.my-service] service_name = "my-service" -rust.binary_names = ["test-service"] -rust.release = false -zone = false +source.type = "local" +source.rust.binary_names = ["test-service"] +source.rust.release = false 
+output.type = "tarball" diff --git a/tests/service-d/cfg.toml b/tests/service-d/cfg.toml index f675f52..702c242 100644 --- a/tests/service-d/cfg.toml +++ b/tests/service-d/cfg.toml @@ -1,5 +1,6 @@ [package.my-package] service_name = "my-service" -rust.binary_names = ["test-service"] -rust.release = false -zone = false +source.type = "local" +source.rust.binary_names = ["test-service"] +source.rust.release = false +output.type = "tarball" diff --git a/tests/service-e/cfg.toml b/tests/service-e/cfg.toml new file mode 100644 index 0000000..f82ec18 --- /dev/null +++ b/tests/service-e/cfg.toml @@ -0,0 +1,21 @@ +[package.pkg-1] +service_name = "svc-1" +source.type = "local" +source.paths = [ { from = "tests/service-e/pkg-1-file.txt", to = "/opt/oxide/pkg-1-file.txt" } ] +output.type = "zone" +output.intermediate_only = true + +[package.pkg-2] +service_name = "svc-2" +source.type = "local" +source.rust.binary_names = ["test-service"] +source.rust.release = false +source.paths = [ { from = "tests/service-e/pkg-2-file.txt", to = "/opt/oxide/pkg-2-file.txt" } ] +output.type = "zone" +output.intermediate_only = true + +[package.pkg-3] +service_name = "my-service" +source.type = "composite" +source.packages = [ "pkg-1.tar.gz", "pkg-2.tar.gz" ] +output.type = "zone" diff --git a/tests/service-e/pkg-1-file.txt b/tests/service-e/pkg-1-file.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/service-e/pkg-2-file.txt b/tests/service-e/pkg-2-file.txt new file mode 100644 index 0000000..e69de29 From 9315bb858ef20a1d23757746bfe5c54268d71a3a Mon Sep 17 00:00:00 2001 From: Sean Klein Date: Fri, 14 Oct 2022 12:30:31 -0700 Subject: [PATCH 2/3] Review feedback --- Cargo.toml | 1 + src/package.rs | 149 +++++++++++++++++++++++++++++++------------------ 2 files changed, 96 insertions(+), 54 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index aedccbb..f7ebe47 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ description = "Packaging tools for Oxide's control plane software" [dependencies] anyhow = "1.0" +async-trait = "0.1.57" chrono = "0.4.19" filetime = "0.2.16" flate2 = "1.0.24" diff --git a/src/package.rs b/src/package.rs index 91246a6..89d78b2 100644 --- a/src/package.rs +++ b/src/package.rs @@ -7,6 +7,8 @@ use crate::blob::BLOB; use crate::progress::{NoProgress, Progress}; use anyhow::{anyhow, Context, Result}; +use async_trait::async_trait; +use flate2::write::GzEncoder; use serde_derive::Deserialize; use std::collections::BTreeMap; use std::convert::TryInto; @@ -15,6 +17,36 @@ use std::path::{Path, PathBuf}; use tar::Builder; use tokio::io::{AsyncSeekExt, AsyncWriteExt}; +#[async_trait] +trait AsyncAppendFile { + async fn append_file_async
<P>(&mut self, path: P, file: &mut File) -> std::io::Result<()>
+ where
+ P: AsRef<Path> + Send;
+
+ async fn append_path_with_name_async<P, N>(&mut self, path: P, name: N) -> std::io::Result<()>
+ where
+ P: AsRef<Path> + Send,
+ N: AsRef<Path> + Send;
+}
+
+#[async_trait]
+impl<W: std::io::Write + Send> AsyncAppendFile for Builder<W> {
+ async fn append_file_async
<P>
(&mut self, path: P, file: &mut File) -> std::io::Result<()> + where + P: AsRef + Send, + { + tokio::task::block_in_place(move || self.append_file(path, file)) + } + + async fn append_path_with_name_async(&mut self, path: P, name: N) -> std::io::Result<()> + where + P: AsRef + Send, + N: AsRef + Send, + { + tokio::task::block_in_place(move || self.append_path_with_name(path, name)) + } +} + // Helper to open a tarfile for reading/writing. fn create_tarfile + std::fmt::Debug>(tarfile: P) -> Result { OpenOptions::new() @@ -122,6 +154,27 @@ pub enum PackageSource { Manual, } +impl PackageSource { + fn rust_package(&self) -> Option<&RustPackage> { + match self { + PackageSource::Local { + rust: Some(rust_pkg), + .. + } => Some(rust_pkg), + _ => None, + } + } + + fn blobs(&self) -> Option<&[PathBuf]> { + match self { + PackageSource::Local { + blobs: Some(blobs), .. + } => Some(blobs), + _ => None, + } + } +} + /// Describes the output format of the package. #[derive(Deserialize, Debug, Clone)] #[serde(tag = "type", rename_all = "lowercase")] @@ -167,14 +220,14 @@ pub struct Package { async fn new_zone_archive_builder( package_name: &str, output_directory: &Path, -) -> Result>> { +) -> Result>> { let tarfile = output_directory.join(format!("{}.tar.gz", package_name)); let file = create_tarfile(tarfile)?; // TODO: Consider using async compression, async tar. // It's not the *worst* thing in the world for a packaging tool to block // here, but it would help the other async threads remain responsive if // we avoided blocking. - let gzw = flate2::write::GzEncoder::new(file, flate2::Compression::fast()); + let gzw = GzEncoder::new(file, flate2::Compression::fast()); let mut archive = Builder::new(gzw); archive.mode(tar::HeaderMode::Deterministic); @@ -186,7 +239,9 @@ async fn new_zone_archive_builder( let contents = r#"{"v":"1","t":"layer"}"#; root_json.write_all(contents.as_bytes()).await?; root_json.seek(std::io::SeekFrom::Start(0)).await?; - archive.append_file("oxide.json", &mut root_json.into_std().await)?; + archive + .append_file_async(&Path::new("oxide.json"), &mut root_json.into_std().await) + .await?; Ok(archive) } @@ -277,17 +332,6 @@ impl Package { progress: &impl Progress, archive: &mut Builder, paths: &Vec, - ) -> Result<()> { - tokio::task::block_in_place(move || self.add_paths_sync(progress, archive, paths))?; - Ok(()) - } - - // Add mapped paths to the package. - fn add_paths_sync( - &self, - progress: &impl Progress, - archive: &mut Builder, - paths: &Vec, ) -> Result<()> { progress.set_message("adding paths"); @@ -339,7 +383,8 @@ impl Package { archive.append_dir(&dst, ".")?; } else if entry.file_type().is_file() { archive - .append_path_with_name(entry.path(), &dst) + .append_path_with_name_async(entry.path(), &dst) + .await .context(format!( "Failed to add file '{}' to '{}'", entry.path().display(), @@ -358,29 +403,23 @@ impl Package { Ok(()) } - async fn add_rust( + async fn add_rust( &self, progress: &impl Progress, archive: &mut Builder, ) -> Result<()> { - match &self.source { - PackageSource::Local { - rust: Some(rust_pkg), - .. - } => { - progress.set_message("adding rust binaries"); - let dst = match self.output { - PackageOutput::Zone { .. } => { - let dst = Path::new("/opt/oxide").join(&self.service_name).join("bin"); - add_directory_and_parents(archive, &dst)?; - archive_path(&dst)? 
- } - PackageOutput::Tarball => PathBuf::from(""), - }; - rust_pkg.add_binaries_to_archive(archive, &dst)?; - progress.increment(1); - } - _ => {} + if let Some(rust_pkg) = self.source.rust_package() { + progress.set_message("adding rust binaries"); + let dst = match self.output { + PackageOutput::Zone { .. } => { + let dst = Path::new("/opt/oxide").join(&self.service_name).join("bin"); + add_directory_and_parents(archive, &dst)?; + archive_path(&dst)? + } + PackageOutput::Tarball => PathBuf::from(""), + }; + rust_pkg.add_binaries_to_archive(archive, &dst).await?; + progress.increment(1); } Ok(()) } @@ -400,20 +439,15 @@ impl Package { destination_path: &Path, ) -> Result<()> { progress.set_message("adding blobs"); - match &self.source { - PackageSource::Local { - blobs: Some(blobs), .. - } => { - let blobs_path = download_directory.join(&self.service_name); - std::fs::create_dir_all(&blobs_path)?; - for blob in blobs { - let blob_path = blobs_path.join(blob); - crate::blob::download(&blob.to_string_lossy(), &blob_path).await?; - progress.increment(1); - } - archive.append_dir_all(&destination_path, &blobs_path)?; + if let Some(blobs) = self.source.blobs() { + let blobs_path = download_directory.join(&self.service_name); + std::fs::create_dir_all(&blobs_path)?; + for blob in blobs { + let blob_path = blobs_path.join(blob); + crate::blob::download(&blob.to_string_lossy(), &blob_path).await?; + progress.increment(1); } - _ => (), + archive.append_dir_all(&destination_path, &blobs_path)?; } Ok(()) } @@ -450,7 +484,10 @@ impl Package { let tmp = tempfile::tempdir()?; for component_package in packages { let component_path = output_directory.join(component_package); - let gzr = flate2::read::GzDecoder::new(open_tarfile(component_path)?); + let gzr = flate2::read::GzDecoder::new(open_tarfile(&component_path)?); + if gzr.header().is_none() { + return Err(anyhow!("Missing gzip header from {}. Note that composite packages can currently only consist of zone images", component_path.display())); + } let mut component_reader = tar::Archive::new(gzr); let entries = component_reader.entries()?; @@ -459,16 +496,19 @@ impl Package { let mut entry = entry?; // Ignore the JSON header files - if entry.path()? 
== Path::new("oxide.json") { + let entry_path = entry.path()?; + if entry_path == Path::new("oxide.json") { continue; } - let entry_unpack_path = - tmp.path().join(entry.path()?.strip_prefix("root/")?); + let entry_unpack_path = tmp.path().join(entry_path.strip_prefix("root/")?); entry.unpack(&entry_unpack_path)?; + let entry_path = entry.path()?; assert!(entry_unpack_path.exists()); - archive.append_path_with_name(entry_unpack_path, entry.path()?)?; + archive + .append_path_with_name_async(entry_unpack_path, entry_path) + .await?; } } } @@ -539,17 +579,18 @@ impl RustPackage { // // - `archive`: The archive to which the binary should be added // - `dst_directory`: The path where the binary should be added in the archive - fn add_binaries_to_archive( + async fn add_binaries_to_archive( &self, archive: &mut tar::Builder, dst_directory: &Path, ) -> Result<()> { for name in &self.binary_names { archive - .append_path_with_name( + .append_path_with_name_async( Self::local_binary_path(&name, self.release), dst_directory.join(&name), ) + .await .map_err(|err| anyhow!("Cannot append binary to tarfile: {}", err))?; } Ok(()) From 816ffcf6b94bd68f72f63d4532546567bfc1464f Mon Sep 17 00:00:00 2001 From: Sean Klein Date: Mon, 17 Oct 2022 11:23:30 -0400 Subject: [PATCH 3/3] Async append_dir_all --- src/package.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/package.rs b/src/package.rs index 89d78b2..03b6793 100644 --- a/src/package.rs +++ b/src/package.rs @@ -27,6 +27,11 @@ trait AsyncAppendFile { where P: AsRef + Send, N: AsRef + Send; + + async fn append_dir_all_async(&mut self, path: P, src_path: Q) -> std::io::Result<()> + where + P: AsRef + Send, + Q: AsRef + Send; } #[async_trait] @@ -45,6 +50,14 @@ impl AsyncAppendFile for Builder { { tokio::task::block_in_place(move || self.append_path_with_name(path, name)) } + + async fn append_dir_all_async(&mut self, path: P, src_path: Q) -> std::io::Result<()> + where + P: AsRef + Send, + Q: AsRef + Send, + { + tokio::task::block_in_place(move || self.append_dir_all(path, src_path)) + } } // Helper to open a tarfile for reading/writing. @@ -431,7 +444,7 @@ impl Package { // - `package`: The package being constructed // - `download_directory`: The location to which the blobs should be downloaded // - `destination_path`: The destination path of the blobs within the archive - async fn add_blobs( + async fn add_blobs( &self, progress: &impl Progress, archive: &mut Builder, @@ -447,7 +460,9 @@ impl Package { crate::blob::download(&blob.to_string_lossy(), &blob_path).await?; progress.increment(1); } - archive.append_dir_all(&destination_path, &blobs_path)?; + archive + .append_dir_all_async(&destination_path, &blobs_path) + .await?; } Ok(()) }
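
A note on manifest migration: the old boolean "zone" flag splits into
"source" (how the package is obtained) and "output" (what it produces).
A minimal sketch of an entry in the new format, using a hypothetical
package name and paths (the field names come from PackageSource and
PackageOutput above):

    [package.example]
    service_name = "example"
    source.type = "local"
    source.paths = [ { from = "out/example", to = "/opt/oxide/example" } ]
    output.type = "zone"

This is the same shape exercised by tests/service-a/cfg.toml; an
"output.type" of "tarball" replaces the old "zone = false".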
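
Prebuilt and target-restricted entries follow the same pattern. A sketch,
with hypothetical repo, commit, and target values (the fields mirror
PackageSource::Prebuilt and Package::only_for_targets; the sha256 is a
placeholder):

    [package.example-prebuilt]
    service_name = "example"
    source.type = "prebuilt"
    source.repo = "example-repo"
    source.commit = "0123456789abcdef0123456789abcdef01234567"
    source.sha256 = "0000000000000000000000000000000000000000000000000000000000000000"
    output.type = "zone"
    only_for_targets.image = "standard"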
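
The Target type added in src/target.rs parses whitespace-separated
key=value pairs, and Config::packages_to_build / packages_to_deploy apply
it. A sketch of how a caller might drive the new API; the manifest path,
target string, and output directory are illustrative:

    use std::path::Path;
    use std::str::FromStr;
    use omicron_zone_package::{config, target::Target};

    #[tokio::main(flavor = "multi_thread")]
    async fn main() -> anyhow::Result<()> {
        let cfg = config::parse("package-manifest.toml")?;
        let target = Target::from_str("image=standard")?;

        // Everything assembled on the builder machine. Note that composite
        // packages expect their component tarballs to exist already, so
        // dependencies must be created first.
        for (name, package) in cfg.packages_to_build(&target) {
            package.create(name, Path::new("out")).await?;
        }

        // The subset that ships: intermediate-only zone images are built
        // above but filtered out here.
        for (name, _package) in cfg.packages_to_deploy(&target) {
            println!("deploy: {}", name);
        }
        Ok(())
    }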
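
One runtime caveat worth calling out: the AsyncAppendFile helpers wrap the
synchronous tar calls in tokio::task::block_in_place, which panics on a
current_thread runtime. That is why the tests switched to
#[tokio::test(flavor = "multi_thread")]. A minimal illustration:

    #[tokio::main(flavor = "multi_thread")]
    async fn main() {
        // Runs blocking work on the current worker thread without stalling
        // other tasks; under flavor = "current_thread" this call panics.
        let sum = tokio::task::block_in_place(|| (0u64..1_000_000).sum::<u64>());
        assert_eq!(sum, 499_999_500_000);
    }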