Merge pull request #1190 from webern/tough-0.10.0
tough: update to v0.10.0
webern committed Jan 25, 2021
2 parents 2cf0129 + 2110613 commit 7ec18f5
Showing 18 changed files with 280 additions and 670 deletions.
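
For orientation before the per-file diffs: the core API change in tough 0.10 is that `Repository::load` with a `Settings` struct (and a caller-managed datastore directory) is replaced by the `RepositoryLoader` builder. The sketch below condenses the before/after shown in the migrator diff further down; the function name and arguments are illustrative, not code from this repository.

```rust
use std::fs::File;
use tough::{ExpirationEnforcement, FilesystemTransport, Repository, RepositoryLoader};
use url::Url;

// tough 0.8 (removed below):
//   tough::Repository::load(&FilesystemTransport, tough::Settings {
//       root, datastore, metadata_base_url, targets_base_url, limits, expiration_enforcement,
//   })
// tough 0.10 (added below): a builder; the datastore argument is gone because
// the loader uses a TempDir internally by default.
fn load_cached_repo(
    root_file: File,
    metadata_base_url: Url,
    targets_base_url: Url,
) -> Result<Repository, tough::error::Error> {
    RepositoryLoader::new(root_file, metadata_base_url, targets_base_url)
        .transport(FilesystemTransport)
        // Caching and offline use make stale metadata acceptable here; see the
        // comment in migrator's main.rs below.
        .expiration_enforcement(ExpirationEnforcement::Unsafe)
        .load()
}
```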
12 changes: 10 additions & 2 deletions sources/Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions sources/api/migration/migrator/Cargo.toml
@@ -20,8 +20,7 @@ regex = "1.1"
semver = "0.11"
simplelog = "0.9"
snafu = "0.6"
tempfile = "3.1.0"
tough = "0.8"
tough = "0.10"
update_metadata = { path = "../../../updater/update_metadata" }
url = "2.1.1"

@@ -31,6 +30,7 @@ cargo-readme = "3.1"
[dev-dependencies]
chrono = "0.4.11"
storewolf = { path = "../../storewolf" }
tempfile = "3.1.0"

[[bin]]
name = "migrator"
3 changes: 0 additions & 3 deletions sources/api/migration/migrator/src/error.rs
@@ -13,9 +13,6 @@ pub(crate) enum Error {
#[snafu(display("Internal error: {}", msg))]
Internal { msg: String },

#[snafu(display("Unable to create tempdir for tough datastore: '{}'", source))]
CreateToughTempDir { source: std::io::Error },

#[snafu(display("Data store path '{}' contains invalid UTF-8", path.display()))]
DataStorePathNotUTF8 { path: PathBuf },

69 changes: 34 additions & 35 deletions sources/api/migration/migrator/src/main.rs
@@ -38,9 +38,9 @@ use std::os::unix::fs::symlink;
use std::os::unix::io::AsRawFd;
use std::path::{Path, PathBuf};
use std::process;
use tempfile::TempDir;
use tough::{ExpirationEnforcement, Limits};
use tough::{ExpirationEnforcement, FilesystemTransport, RepositoryLoader};
use update_metadata::load_manifest;
use url::Url;

mod args;
mod direction;
@@ -113,49 +113,48 @@ pub(crate) fn run(args: &Args) -> Result<()> {
process::exit(0);
});

// Prepare to load the locally cached TUF repository to obtain the manifest. Part of using a
// `TempDir` is disabling timestamp checking, because we want an instance to still come up and
// run migrations regardless of how the system time relates to what we have cached (for
// example if someone runs an update, then shuts down the instance for several weeks, beyond the
// expiration of at least the cached timestamp.json before booting it back up again). We also
// use a `TempDir` because we see no value in keeping a datastore around. The latest known
// versions of the repository metadata will always be the versions of repository metadata we
// have cached on the disk. More info at `ExpirationEnforcement::Unsafe` below.
let tough_datastore = TempDir::new().context(error::CreateToughTempDir)?;
let metadata_url = url::Url::from_directory_path(&args.metadata_directory).map_err(|_| {
// create URLs from the metadata and targets directory paths
let metadata_base_url = Url::from_directory_path(&args.metadata_directory).map_err(|_| {
error::Error::DirectoryUrl {
path: args.metadata_directory.clone(),
}
})?;
let migrations_url =
let targets_base_url =
url::Url::from_directory_path(&args.migration_directory).map_err(|_| {
error::Error::DirectoryUrl {
path: args.migration_directory.clone(),
}
})?;

// open a reader to the root.json file
let root_file = File::open(&args.root_path).with_context(|| error::OpenRoot {
path: args.root_path.clone(),
})?;

// We will load the locally cached TUF repository to obtain the manifest. The Repository is
// loaded using a `TempDir` for its internal Datastore (this is the default). Part of using a
// `TempDir` is disabling timestamp checking, because we want an instance to still come up and
// run migrations regardless of how the system time relates to what we have cached (for
// example if someone runs an update, then shuts down the instance for several weeks, beyond the
// expiration of at least the cached timestamp.json before booting it back up again). We also
// use a `TempDir` because we see no value in keeping a datastore around. The latest known
// versions of the repository metadata will always be the versions of repository metadata we
// have cached on the disk. More info at `ExpirationEnforcement::Unsafe` below.

// Failure to load the TUF repo at the expected location is a serious issue because updog should
// always create a TUF repo that contains at least the manifest, even if there are no migrations.
let repo = tough::Repository::load(
&tough::FilesystemTransport,
tough::Settings {
root: File::open(&args.root_path).context(error::OpenRoot {
path: args.root_path.clone(),
})?,
datastore: tough_datastore.path(),
metadata_base_url: metadata_url.as_str(),
targets_base_url: migrations_url.as_str(),
limits: Limits::default(),
// The threats TUF mitigates are more than the threats we are attempting to mitigate
// here by caching signatures for migrations locally and using them after a reboot but
// prior to Internet connectivity. We are caching the TUF repo and use it while offline
// after a reboot to mitigate binaries being added or modified in the migrations
// directory; the TUF repo is simply a code signing method we already have in place,
// even if it's not one that initially makes sense for this use case. So, we don't care
// if the targets expired between updog downloading them and now.
expiration_enforcement: ExpirationEnforcement::Unsafe,
},
)
.context(error::RepoLoad)?;
let repo = RepositoryLoader::new(root_file, metadata_base_url, targets_base_url)
.transport(FilesystemTransport)
// The threats TUF mitigates are more than the threats we are attempting to mitigate
// here by caching signatures for migrations locally and using them after a reboot but
// prior to Internet connectivity. We are caching the TUF repo and use it while offline
// after a reboot to mitigate binaries being added or modified in the migrations
// directory; the TUF repo is simply a code signing method we already have in place,
// even if it's not one that initially makes sense for this use case. So, we don't care
// if the targets expired between updog downloading them and now.
.expiration_enforcement(ExpirationEnforcement::Unsafe)
.load()
.context(error::RepoLoad)?;
let manifest = load_manifest(&repo).context(error::LoadManifest)?;
let migrations =
update_metadata::find_migrations(&current_version, &args.migrate_to_version, &manifest)
@@ -222,7 +221,7 @@ where
/// The given data store is used as a starting point; each migration is given the output of the
/// previous migration, and the final output becomes the new data store.
fn run_migrations<P, S>(
repository: &tough::Repository<'_, tough::FilesystemTransport>,
repository: &tough::Repository,
direction: Direction,
migrations: &[S],
source_datastore: P,
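
The doc comment on `run_migrations` above describes a hand-off chain: each migration reads the previous migration's output, and the last output becomes the new data store. Here is a minimal sketch of that pattern, with a hypothetical `run_one_migration` helper standing in for the real extract-and-exec logic; the names and signatures are illustrative, not this repository's code.

```rust
use std::io;
use std::path::{Path, PathBuf};

// Hypothetical stand-in for fetching one migration binary from the TUF repo,
// running it against `input`, and returning the directory it wrote.
fn run_one_migration(name: &str, input: &Path) -> io::Result<PathBuf> {
    let _ = (name, input);
    unimplemented!("illustrative only")
}

// The chaining described in the doc comment: start from the source data store,
// feed each migration the previous output, and return the final output.
fn chain_migrations(migrations: &[String], source_datastore: &Path) -> io::Result<PathBuf> {
    let mut current = source_datastore.to_path_buf();
    for name in migrations {
        current = run_one_migration(name, &current)?;
    }
    Ok(current)
}
```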
12 changes: 8 additions & 4 deletions sources/api/migration/migrator/src/test.rs
@@ -138,7 +138,9 @@ fn create_test_repo() -> TestRepo {
let one = std::num::NonZeroU64::new(1).unwrap();
editor
.targets_version(one)
.unwrap()
.targets_expires(long_ago)
.unwrap()
.snapshot_version(one)
.snapshot_expires(long_ago)
.timestamp_version(one)
@@ -154,10 +156,12 @@
})
.for_each(|dir_entry_result| {
let dir_entry = dir_entry_result.unwrap();
editor.add_target(
dir_entry.file_name().to_str().unwrap().into(),
tough::schema::Target::from_path(dir_entry.path()).unwrap(),
);
editor
.add_target(
dir_entry.file_name().to_str().unwrap().into(),
tough::schema::Target::from_path(dir_entry.path()).unwrap(),
)
.unwrap();
});
let signed_repo = editor
.sign(&[Box::new(tough::key_source::LocalKeySource { path: pem() })])
2 changes: 1 addition & 1 deletion sources/updater/update_metadata/Cargo.toml
@@ -18,7 +18,7 @@ serde_json = "1.0.40"
serde_plain = "0.3.0"
snafu = "0.6.0"
toml = "0.5"
tough = "0.8"
tough = "0.10"

[lib]
name = "update_metadata"
2 changes: 1 addition & 1 deletion sources/updater/update_metadata/src/lib.rs
@@ -444,7 +444,7 @@ fn find_migrations_forward(
Ok(targets)
}

pub fn load_manifest<T: tough::Transport>(repository: &tough::Repository<T>) -> Result<Manifest> {
pub fn load_manifest(repository: &tough::Repository) -> Result<Manifest> {
let target = "manifest.json";
serde_json::from_reader(
repository
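
A small usage sketch of the simplified signature above: with tough 0.10, `Repository` is no longer generic over a `Transport`, so callers (like migrator's `main.rs` above) pass `&tough::Repository` with no type parameters. This assumes `Manifest` is exported at the crate root, as the call sites in this diff suggest; the wrapper function itself is illustrative only.

```rust
use update_metadata::{load_manifest, Manifest};

// Illustrative wrapper: no Transport type parameter or turbofish appears at
// the call site anymore.
fn read_manifest(repo: &tough::Repository) -> Manifest {
    load_manifest(repo).expect("the cached repo should always contain manifest.json")
}
```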
3 changes: 1 addition & 2 deletions sources/updater/updog/Cargo.toml
@@ -22,9 +22,8 @@ serde_plain = "0.3.0"
signpost = { path = "../signpost" }
simplelog = "0.9"
snafu = "0.6.0"
tempfile = "3.1.0"
toml = "0.5.1"
tough = { version = "0.8", features = ["http"] }
tough = { version = "0.10", features = ["http"] }
update_metadata = { path = "../update_metadata" }
structopt = "0.3"
url = "2.1.0"
99 changes: 6 additions & 93 deletions sources/updater/updog/src/error.rs
@@ -1,6 +1,5 @@
#![allow(clippy::default_trait_access)]

use semver::Version;
use snafu::{Backtrace, Snafu};
use std::path::PathBuf;
use update_metadata::error::Error as update_metadata_error;
@@ -40,26 +39,13 @@ pub(crate) enum Error {
backtrace: Backtrace,
},

#[snafu(display("Failed to serialize config file {}: {}", path.display(), source))]
ConfigSerialize {
path: PathBuf,
source: toml::ser::Error,
backtrace: Backtrace,
},

#[snafu(display("Failed to create metadata cache directory '{}': {}", path, source))]
CreateMetadataCache {
path: &'static str,
source: std::io::Error,
backtrace: Backtrace,
},

#[snafu(display("Failed to create a tempdir for tough datastore: {}", source))]
CreateTempDir {
source: std::io::Error,
backtrace: Backtrace,
},

#[snafu(display("Failed to create directory: {:?}", path))]
DirCreate {
backtrace: Backtrace,
@@ -73,40 +59,13 @@ pub(crate) enum Error {
#[snafu(display("Could not mark inactive partition for boot: {}", source))]
InactivePartitionUpgrade { source: signpost::Error },

#[snafu(display("Failed to attach image to loop device"))]
LoopAttachFailed {
backtrace: Backtrace,
source: std::io::Error,
},

#[snafu(display("Failed to open loop device control"))]
LoopControlFailed {
backtrace: Backtrace,
source: std::io::Error,
},

#[snafu(display("Failed to find free loop device"))]
LoopFindFailed {
backtrace: Backtrace,
source: std::io::Error,
},

#[snafu(display("Could not determine loop device path"))]
LoopNameFailed { backtrace: Backtrace },

#[snafu(display("Failed to decode LZ4-compressed target {}: {}", target, source))]
Lz4Decode {
target: String,
source: std::io::Error,
backtrace: Backtrace,
},

#[snafu(display("Failed to parse updates manifest: {}", source))]
ManifestParse {
source: serde_json::Error,
backtrace: Backtrace,
},

#[snafu(display("Metadata error: {}", source))]
Metadata {
source: tough::error::Error,
@@ -120,31 +79,6 @@ pub(crate) enum Error {
name: String,
},

#[snafu(display("Migration not found in image: {:?}", name))]
MigrationNotLocal { backtrace: Backtrace, name: PathBuf },

#[snafu(display("Migration ({},{}) not present in manifest", from, to))]
MigrationNotPresent {
backtrace: Backtrace,
from: Version,
to: Version,
},

#[snafu(display("Missing version in metadata: {}", version))]
MissingVersion {
backtrace: Backtrace,
version: String,
},

#[snafu(display("Temporary image mount failed"))]
MountFailed {
backtrace: Backtrace,
source: std::io::Error,
},

#[snafu(display("No update available"))]
NoUpdate { backtrace: Backtrace },

#[snafu(display("Failed to open partition {}: {}", path.display(), source))]
OpenPartition {
path: PathBuf,
@@ -191,12 +125,6 @@ pub(crate) enum Error {
#[snafu(display("Unable to get OS version: {}", source))]
ReleaseVersion { source: bottlerocket_release::Error },

#[snafu(display("Failed setting permissions of '{}': {}", path.display(), source))]
SetPermissions {
path: PathBuf,
source: std::io::Error,
},

#[snafu(display("Target not found: {}", target))]
TargetNotFound {
target: String,
@@ -209,36 +137,15 @@ pub(crate) enum Error {
source: std::io::Error,
},

#[snafu(display("2Borrow2Fast"))]
TransportBorrow {
backtrace: Backtrace,
source: std::cell::BorrowMutError,
},

#[snafu(display("No update available"))]
UpdateNotAvailable { backtrace: Backtrace },

#[snafu(display("Update {} exists but wave in the future", version))]
UpdateNotReady {
backtrace: Backtrace,
version: Version,
},

#[snafu(display("Failed to serialize update information: {}", source))]
UpdateSerialize {
source: serde_json::Error,
backtrace: Backtrace,
},

#[snafu(display("Update in the incorrect state"))]
UpdateState { backtrace: Backtrace },

#[snafu(display("Target partition is unrecognized: {}", partition))]
UnknownPartition {
partition: String,
backtrace: Backtrace,
},

#[snafu(display("--wave-file <path> required to add waves to update"))]
WaveFileArg { backtrace: Backtrace },

@@ -261,6 +168,12 @@ pub(crate) enum Error {

#[snafu(display("Failed to store manifest and migrations: {}", source))]
RepoCacheMigrations { source: tough::error::Error },

#[snafu(display("Unable to parse '{}' as a URL: {}", url, source))]
UrlParse {
source: url::ParseError,
url: String,
},
}

impl std::convert::From<update_metadata::error::Error> for Error {
