refactor: download rpc client directly from S3
Rather than downloading the RPC client over HTTPS with `reqwest`, it is now retrieved directly from S3 using the AWS SDK.

This was done because a problem showed up in CI: the `reqwest` crate was rejected by `cargo-deny` due to a security vulnerability in its dependency chain. As it turned out, I had already used the AWS SDK on another branch to retrieve logs from S3, so this refactor also removes the dependency on `reqwest`.
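
Condensed into a self-contained sketch, the new download path looks roughly like this. The `main` wrapper, hard-coded bucket/key/destination, and use of `color_eyre` for error handling are illustrative only; the real implementation lives in `src/s3.rs` below.

use aws_sdk_s3::Client;
use tokio::io::AsyncWriteExt;
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() -> color_eyre::Result<()> {
    // Credentials are resolved from the usual AWS environment variables/profile;
    // the region matches the one used by the deploy tool.
    let conf = aws_config::from_env().region("eu-west-2").load().await;
    let client = Client::new(&conf);

    // Issue a GetObject request for the RPC client archive.
    let mut resp = client
        .get_object()
        .bucket("sn-testnet")
        .key("rpc_client-latest-x86_64-unknown-linux-musl.tar.gz")
        .send()
        .await?;

    // Stream the body to disk chunk by chunk rather than buffering it in memory.
    let mut file = tokio::fs::File::create("rpc_client.tar.gz").await?;
    while let Some(bytes) = resp.body.try_next().await? {
        file.write_all(&bytes).await?;
    }
    Ok(())
}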
jacderida committed Aug 25, 2023
1 parent fd400a0 commit 5ef7168
Showing 14 changed files with 747 additions and 888 deletions.
1,311 changes: 629 additions & 682 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions Cargo.toml
@@ -9,22 +9,24 @@ path="src/main.rs"
name="testnet-deploy"

[dependencies]
aws-config = "0.56.0"
aws-sdk-s3 = "0.29.0"
async-trait = "0.1"
clap = { version = "4.2.1", features = ["derive"] }
color-eyre = "0.6.2"
dirs-next = "2.0.0"
dotenv = "0.15.0"
flate2 = "1.0"
indicatif = "0.17.3"
inquire = "0.6.2"
reqwest = { version = "0.11.20", default-features = false, features = ["json", "rustls-tls"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0.23"
tar = "0.4"
tokio = { version = "1.26", features = ["full"] }
tokio-stream = "0.1.14"

[dev-dependencies]
assert_fs = "~1.0"
httpmock = "0.6"
mockall = "0.11.3"
predicates = "2.0"
6 changes: 4 additions & 2 deletions src/error.rs
@@ -27,12 +27,14 @@ pub enum Error {
ExternalCommandRunFailed(String),
#[error("To provision the remaining nodes the multiaddr of the genesis node must be supplied")]
GenesisMultiAddrNotSupplied,
#[error("Failed to retrieve '{0}' from '{1}")]
GetS3ObjectError(String, String),
#[error(transparent)]
InquireError(#[from] inquire::InquireError),
#[error(transparent)]
Io(#[from] std::io::Error),
#[error(transparent)]
Reqwest(#[from] reqwest::Error),
#[error("Error in byte stream when attempting to retrieve S3 object")]
S3ByteStreamError,
#[error(transparent)]
SerdeJson(#[from] serde_json::Error),
#[error("An unexpected error occurred during the setup process")]
12 changes: 6 additions & 6 deletions src/lib.rs
@@ -18,7 +18,7 @@ mod tests;
use crate::ansible::{AnsibleRunner, AnsibleRunnerInterface};
use crate::error::{Error, Result};
use crate::rpc_client::{RpcClient, RpcClientInterface};
use crate::s3::S3AssetRepository;
use crate::s3::{S3AssetRepository, S3RepositoryInterface};
use crate::ssh::{SshClient, SshClientInterface};
use crate::terraform::{TerraformRunner, TerraformRunnerInterface};
use flate2::read::GzDecoder;
@@ -163,7 +163,7 @@ impl TestnetDeployBuilder {
PathBuf::from("./safenode_rpc_client"),
working_directory_path.clone(),
);
let s3_repository = S3AssetRepository::new("https://sn-testnet.s3.eu-west-2.amazonaws.com");
let s3_repository = S3AssetRepository::new("sn-testnet");

let testnet = TestnetDeploy::new(
Box::new(terraform_runner),
@@ -172,7 +172,7 @@
Box::new(SshClient::new(ssh_secret_key_path)),
working_directory_path,
provider.clone(),
s3_repository,
Box::new(s3_repository),
);

Ok(testnet)
@@ -186,7 +186,7 @@ pub struct TestnetDeploy {
pub ssh_client: Box<dyn SshClientInterface>,
pub working_directory_path: PathBuf,
pub cloud_provider: CloudProvider,
pub s3_repository: S3AssetRepository,
pub s3_repository: Box<dyn S3RepositoryInterface>,
pub inventory_file_path: PathBuf,
}

@@ -198,7 +198,7 @@ impl TestnetDeploy {
ssh_client: Box<dyn SshClientInterface>,
working_directory_path: PathBuf,
cloud_provider: CloudProvider,
s3_repository: S3AssetRepository,
s3_repository: Box<dyn S3RepositoryInterface>,
) -> TestnetDeploy {
let inventory_file_path = working_directory_path
.join("ansible")
@@ -231,7 +231,7 @@ impl TestnetDeploy {
let asset_name = "rpc_client-latest-x86_64-unknown-linux-musl.tar.gz";
let archive_path = self.working_directory_path.join(asset_name);
self.s3_repository
.download_asset(asset_name, &archive_path)
.download_object(asset_name, &archive_path)
.await?;
let archive_file = File::open(archive_path.clone())?;
let decoder = GzDecoder::new(archive_file);
153 changes: 52 additions & 101 deletions src/s3.rs
@@ -5,119 +5,70 @@
// Please see the LICENSE file for more details.

use crate::error::{Error, Result};
use indicatif::{ProgressBar, ProgressStyle};
use std::io::Write;
use std::path::PathBuf;
use async_trait::async_trait;
use aws_sdk_s3::Client;
#[cfg(test)]
use mockall::automock;
use std::path::Path;
use tokio::io::AsyncWriteExt;
use tokio_stream::StreamExt;

/// Provides an interface for using the S3 repository.
///
/// This trait exists for unit testing: it enables testing behaviour without actually calling out
/// to S3.
#[cfg_attr(test, automock)]
#[async_trait]
pub trait S3RepositoryInterface {
async fn download_object(&self, object_key: &str, dest_path: &Path) -> Result<()>;
}

pub struct S3AssetRepository {
pub base_url: String,
pub bucket_name: String,
}

impl S3AssetRepository {
pub fn new(base_url: &str) -> Self {
Self {
base_url: base_url.to_string(),
}
}
#[async_trait]
impl S3RepositoryInterface for S3AssetRepository {
async fn download_object(&self, object_key: &str, dest_path: &Path) -> Result<()> {
let conf = aws_config::from_env().region("eu-west-2").load().await;
let client = Client::new(&conf);

/// Downloads the specified binary archive from S3 to the path specified.
///
/// # Arguments
///
/// * `asset_name` - The name of the binary archive to download from the S3 bucket.
/// * `dest_path` - The path where the archive will be saved.
///
/// # Returns
///
/// `Ok(())` on success.
///
/// # Errors
///
/// Returns an `eyre::Report` if an error occurs during the download process. Possible error
/// conditions include network errors, I/O errors, and missing or invalid data in the response
/// body.
pub async fn download_asset(&self, asset_name: &str, dest_path: &PathBuf) -> Result<()> {
let client = reqwest::Client::new();
let mut response = client
.get(format!("{}/{}", self.base_url, asset_name))
println!("Retrieving {object_key} from S3...");
let mut resp = client
.get_object()
.bucket(self.bucket_name.clone())
.key(object_key)
.send()
.await?;
let content_len = response
.content_length()
.ok_or_else(|| Error::AssetContentLengthUndetermined)?;
.await
.map_err(|_| {
Error::GetS3ObjectError(object_key.to_string(), self.bucket_name.clone())
})?;

let pb = ProgressBar::new(content_len);
pb.set_style(
ProgressStyle::default_bar()
.template("{spinner:.green} [{bar:40.cyan/blue}] {bytes}/{total_bytes}")?
.progress_chars("#>-"),
);
if let Some(parent) = dest_path.parent() {
if !parent.exists() {
tokio::fs::create_dir_all(parent).await?;
}
}

let mut downloaded_file = std::fs::File::create(dest_path)?;
let mut bytes_downloaded = 0;
while let Some(chunk) = response.chunk().await? {
downloaded_file.write_all(&chunk)?;
bytes_downloaded += chunk.len() as u64;
pb.set_position(bytes_downloaded);
let mut file = tokio::fs::File::create(&dest_path).await?;
while let Some(bytes) = resp
.body
.try_next()
.await
.map_err(|_| Error::S3ByteStreamError)?
{
file.write_all(&bytes).await?;
}
pb.finish_with_message("{bytes_downloaded}");

println!("Saved at {}", dest_path.to_string_lossy());
Ok(())
}
}

#[cfg(test)]
mod test {
use super::S3AssetRepository;
use assert_fs::prelude::*;
use color_eyre::Result;
use flate2::write::GzEncoder;
use flate2::Compression;
use httpmock::prelude::*;
use predicates::prelude::*;
use std::fs::File;

#[tokio::test]
async fn download_asset_should_download_the_asset_to_the_specified_path() -> Result<()> {
let tmp_data_path = assert_fs::TempDir::new()?;
let rpc_client_archive = tmp_data_path.child("rpc_client.tar.gz");
let downloaded_rpc_client_archive =
tmp_data_path.child("rpc_client-latest-x86_64-unknown-linux-musl.tar.gz");
let fake_rpc_client_bin = tmp_data_path.child("safenode_rpc_client");
fake_rpc_client_bin.write_binary(b"fake code")?;

let mut fake_rpc_client_bin_file = File::open(fake_rpc_client_bin.path())?;
let gz_encoder = GzEncoder::new(
File::create(rpc_client_archive.path())?,
Compression::default(),
);
let mut builder = tar::Builder::new(gz_encoder);
builder.append_file("safe", &mut fake_rpc_client_bin_file)?;
builder.into_inner()?;
let safe_archive_metadata = std::fs::metadata(rpc_client_archive.path())?;

let server = MockServer::start();
let download_asset_mock = server.mock(|when, then| {
when.method(GET)
.path("/rpc_client-latest-x86_64-unknown-linux-musl.tar.gz");
then.status(200)
.header("Content-Length", safe_archive_metadata.len().to_string())
.header("Content-Type", "application/gzip")
.body_from_file(rpc_client_archive.path().to_str().unwrap());
});

let repository = S3AssetRepository::new(&server.base_url());
repository
.download_asset(
"rpc_client-latest-x86_64-unknown-linux-musl.tar.gz",
&downloaded_rpc_client_archive.path().to_path_buf(),
)
.await?;

download_asset_mock.assert();
downloaded_rpc_client_archive.assert(predicate::path::is_file());
let downloaded_file_metadata = std::fs::metadata(downloaded_rpc_client_archive.path())?;
assert_eq!(safe_archive_metadata.len(), downloaded_file_metadata.len());

Ok(())
impl S3AssetRepository {
pub fn new(bucket_name: &str) -> Self {
Self {
bucket_name: bucket_name.to_string(),
}
}
}
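
One benefit of swapping the concrete `S3AssetRepository` for a `Box<dyn S3RepositoryInterface>` is that the download step can now be mocked directly, which is why the `httpmock`-based test above was dropped. A rough sketch of how such a test could look (this particular test is not part of the commit; it assumes the `MockS3RepositoryInterface` generated by `automock` in this crate):

#[cfg(test)]
mod sketch {
    use crate::s3::{MockS3RepositoryInterface, S3RepositoryInterface};
    use std::path::Path;

    #[tokio::test]
    async fn download_object_can_be_mocked() -> color_eyre::Result<()> {
        // Expect exactly one download call and make it succeed without touching S3.
        let mut s3_repository = MockS3RepositoryInterface::new();
        s3_repository
            .expect_download_object()
            .times(1)
            .returning(|_, _| Ok(()));

        // Anything that accepts a Box<dyn S3RepositoryInterface> can be exercised
        // against the mock; here it is simply called directly.
        let repo: Box<dyn S3RepositoryInterface> = Box::new(s3_repository);
        repo.download_object(
            "rpc_client-latest-x86_64-unknown-linux-musl.tar.gz",
            Path::new("/tmp/rpc_client.tar.gz"),
        )
        .await?;
        Ok(())
    }
}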
4 changes: 2 additions & 2 deletions src/tests/build_safe_network_binaries.rs
@@ -52,7 +52,7 @@ async fn should_run_ansible_to_build_faucet() -> Result<()> {
Box::new(ssh_client),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

testnet.init("beta").await?;
@@ -103,7 +103,7 @@ async fn should_run_ansible_to_build_faucet_and_custom_safenode_bin() -> Result<
Box::new(ssh_client),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

testnet.init("beta").await?;
8 changes: 5 additions & 3 deletions src/tests/clean.rs
@@ -8,6 +8,7 @@ use super::super::{CloudProvider, TestnetDeploy};
use super::setup::*;
use crate::ansible::MockAnsibleRunnerInterface;
use crate::rpc_client::MockRpcClientInterface;
use crate::s3::MockS3RepositoryInterface;
use crate::ssh::MockSshClientInterface;
use crate::terraform::MockTerraformRunnerInterface;
use assert_fs::prelude::*;
@@ -61,7 +62,7 @@ async fn should_run_terraform_destroy_and_delete_workspace_and_delete_inventory_
Box::new(MockSshClientInterface::new()),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

// Calling init will create the Ansible inventory files, which we want to be removed by
@@ -85,7 +86,8 @@
#[tokio::test]
async fn should_return_an_error_when_invalid_name_is_supplied() -> Result<()> {
let (tmp_dir, working_dir) = setup_working_directory()?;
let s3_repository = setup_default_s3_repository(&working_dir)?;
let mut s3_repository = MockS3RepositoryInterface::new();
s3_repository.expect_download_object().times(0);
let mut terraform_runner = MockTerraformRunnerInterface::new();
terraform_runner
.expect_workspace_list()
@@ -105,7 +107,7 @@ async fn should_return_an_error_when_invalid_name_is_supplied() -> Result<()> {
Box::new(MockSshClientInterface::new()),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

let result = testnet.clean("beta").await;
6 changes: 4 additions & 2 deletions src/tests/create_infra.rs
@@ -8,6 +8,7 @@ use super::super::{CloudProvider, TestnetDeploy};
use super::setup::*;
use crate::ansible::MockAnsibleRunnerInterface;
use crate::rpc_client::MockRpcClientInterface;
use crate::s3::MockS3RepositoryInterface;
use crate::ssh::MockSshClientInterface;
use crate::terraform::MockTerraformRunnerInterface;
use color_eyre::Result;
@@ -19,7 +20,8 @@ use mockall::predicate::*;
#[tokio::test]
async fn should_run_terraform_apply_with_custom_bin_set() -> Result<()> {
let (tmp_dir, working_dir) = setup_working_directory()?;
let s3_repository = setup_default_s3_repository(&working_dir)?;
let mut s3_repository = MockS3RepositoryInterface::new();
s3_repository.expect_download_object().times(0);
let mut terraform_runner = MockTerraformRunnerInterface::new();
terraform_runner
.expect_workspace_select()
@@ -42,7 +44,7 @@ async fn should_run_terraform_apply_with_custom_bin_set() -> Result<()> {
Box::new(MockSshClientInterface::new()),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

testnet.create_infra("beta", 30, true).await?;
6 changes: 4 additions & 2 deletions src/tests/get_genesis_multiaddr.rs
@@ -8,6 +8,7 @@ use super::super::{CloudProvider, TestnetDeploy};
use super::setup::*;
use crate::ansible::MockAnsibleRunnerInterface;
use crate::rpc_client::{MockRpcClientInterface, NodeInfo};
use crate::s3::MockS3RepositoryInterface;
use crate::ssh::MockSshClientInterface;
use crate::terraform::MockTerraformRunnerInterface;
use color_eyre::Result;
@@ -18,7 +19,8 @@ use std::path::PathBuf;
#[tokio::test]
async fn should_return_the_genesis_multiaddr() -> Result<()> {
let (tmp_dir, working_dir) = setup_working_directory()?;
let s3_repository = setup_default_s3_repository(&working_dir)?;
let mut s3_repository = MockS3RepositoryInterface::new();
s3_repository.expect_download_object().times(0);
let mut ansible_runner = MockAnsibleRunnerInterface::new();
ansible_runner
.expect_inventory_list()
@@ -50,7 +52,7 @@ async fn should_return_the_genesis_multiaddr() -> Result<()> {
Box::new(MockSshClientInterface::new()),
working_dir.to_path_buf(),
CloudProvider::DigitalOcean,
s3_repository,
Box::new(s3_repository),
);

let (multiaddr, genesis_ip) = testnet.get_genesis_multiaddr("beta").await?;
