Skip to content

Commit

Permalink
Add unpack tests & fix typos #102
Browse files Browse the repository at this point in the history
  • Loading branch information
jpraynaud committed Apr 19, 2022
1 parent 23d6685 commit b5a2505
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 15 deletions.
2 changes: 1 addition & 1 deletion mithril-network/mithril-client/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
**Install Rust**

- Install a [correctly configured](https://www.rust-lang.org/learn/get-started) Rust toolchain (version 1.58.0+).
- Instal Rust [Clippy](https://github.com/rust-lang/rust-clippy) component.
- Install Rust [Clippy](https://github.com/rust-lang/rust-clippy) component.


## Download source code:
Expand Down
57 changes: 47 additions & 10 deletions mithril-network/mithril-client/src/aggregator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@ pub trait AggregatorHandler {
/// Download snapshot
async fn download_snapshot(&self, digest: String, location: String) -> Result<String, String>;

/// Unarchive snapshot
async fn unarchive_snapshot(&self, digest: String) -> Result<String, String>;
/// Unpack snapshot
async fn unpack_snapshot(&self, digest: String) -> Result<String, String>;
}

/// AggregatorHTTPClient is a http client for an aggregator
Expand Down Expand Up @@ -116,7 +116,7 @@ impl AggregatorHandler for AggregatorHTTPClient {
bytes_downloaded,
bytes_total
);
io::stdout().flush().ok().expect("Could not flush stdout");
io::stdout().flush().expect("Could not flush stdout");
}
Ok(local_path.into_os_string().into_string().unwrap())
}
Expand All @@ -127,19 +127,15 @@ impl AggregatorHandler for AggregatorHTTPClient {
}
}

/// Unarchive snapshot
async fn unarchive_snapshot(&self, digest: String) -> Result<String, String> {
/// Unpack snapshot
async fn unpack_snapshot(&self, digest: String) -> Result<String, String> {
debug!("Restore snapshot {}", digest);
println!("Restoring...");
let local_path = archive_file_path(digest, self.config.network.clone())?;
let snapshot_file_tar_gz = fs::File::open(local_path.clone())
.map_err(|e| format!("can't open snapshot file: {}", e))?;
let snapshot_file_tar = GzDecoder::new(snapshot_file_tar_gz);
let unarchive_dir_path = local_path
.clone()
.parent()
.unwrap()
.join(path::Path::new("db"));
let unarchive_dir_path = local_path.parent().unwrap().join(path::Path::new("db"));
let mut snapshot_archive = Archive::new(snapshot_file_tar);
snapshot_archive
.unpack(&unarchive_dir_path)
Expand All @@ -161,6 +157,8 @@ fn archive_file_path(digest: String, network: String) -> Result<path::PathBuf, S
#[cfg(test)]
mod tests {
use super::*;
use flate2::write::GzEncoder;
use flate2::Compression;
use httpmock::prelude::*;
use serde_json::json;
use std::io::Read;
Expand Down Expand Up @@ -313,4 +311,43 @@ mod tests {
let local_file_path = aggregator_client.download_snapshot(digest, location).await;
assert!(local_file_path.is_err());
}

/// Happy-path test: builds a small source directory on disk, archives it
/// as a gzipped tarball at the snapshot's expected location for the digest,
/// then asserts that `unpack_snapshot` extracts it without error.
#[tokio::test]
async fn unpack_snapshot_ok() {
    let network = "testnet".to_string();
    let digest = "digest123".to_string();
    let (_, config) = setup_test();
    // `str::repeat` already returns an owned `String`; the extra
    // `.to_string()` was redundant.
    let data_expected = "1234567890".repeat(1024);
    let data_file_name = "data.txt";
    // Shadows the free function of the same name with its result (a PathBuf),
    // i.e. the path where the client expects to find the archive.
    let archive_file_path = archive_file_path(digest.clone(), network).unwrap();
    let source_directory_name = "src";
    let source_file_path = archive_file_path
        .parent()
        .unwrap()
        .join(path::Path::new(source_directory_name))
        .join(path::Path::new(data_file_name));
    // `parent().unwrap()` already yields a `&Path`; no extra borrow needed.
    fs::create_dir_all(source_file_path.parent().unwrap()).unwrap();
    let mut source_file = fs::File::create(&source_file_path).unwrap();
    write!(source_file, "{}", data_expected).unwrap();
    let archive_file = fs::File::create(&archive_file_path).unwrap();
    let archive_encoder = GzEncoder::new(&archive_file, Compression::default());
    let mut archive_builder = tar::Builder::new(archive_encoder);
    archive_builder
        .append_dir_all(".", source_file_path.parent().unwrap())
        .unwrap();
    // `into_inner` finishes the tar stream and returns the gzip encoder;
    // `finish` then flushes the gzip stream so the archive is complete on disk.
    archive_builder.into_inner().unwrap().finish().unwrap();
    // `config` and `digest` are not used again below, so the clones the
    // original made here were redundant (clippy::redundant_clone).
    let aggregator_client = AggregatorHTTPClient::new(config);
    let local_dir_path = aggregator_client.unpack_snapshot(digest).await;
    local_dir_path.expect("unexpected error");
}

/// Error-path test: `unpack_snapshot` must return an `Err` when no archive
/// exists on disk for the requested digest.
#[tokio::test]
async fn unpack_snapshot_ko_noarchive() {
    let digest = "digest123".to_string();
    let (_, config) = setup_test();
    let client = AggregatorHTTPClient::new(config.clone());
    let result = client.unpack_snapshot(digest).await;
    assert!(result.is_err());
}
}
6 changes: 3 additions & 3 deletions mithril-network/mithril-client/src/aggregator_fake.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ impl AggregatorHandler for AggregatorHandlerFake {
unimplemented!("Download snapshot {} at {}", digest, location);
}

/// Unarchive snapshot
async fn unarchive_snapshot(&self, digest: String) -> Result<String, String> {
unimplemented!("Unarchive snapshot {}", digest);
/// Unpack snapshot
async fn unpack_snapshot(&self, digest: String) -> Result<String, String> {
unimplemented!("Unpack snapshot {}", digest);
}
}
2 changes: 1 addition & 1 deletion mithril-network/mithril-client/src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ where
debug!("Restore snapshot {}", digest);
match &self.aggregator_handler {
Some(aggregator_handler) => {
match aggregator_handler.unarchive_snapshot(digest.clone()).await {
match aggregator_handler.unpack_snapshot(digest.clone()).await {
Ok(to) => Ok(to),
Err(err) => Err(err),
}
Expand Down

0 comments on commit b5a2505

Please sign in to comment.