9 changes: 4 additions & 5 deletions src/aws.rs
@@ -13,9 +13,8 @@ use aws_sdk_s3::{
 };
 use aws_smithy_http::byte_stream::Length;
 use log::*;
-use temp_dir::TempDir;
 
-use crate::config::Params;
+use crate::{backup::Archive, config::Params};
 
 /// In bytes, minimum chunk size of 5MB. Increase CHUNK_SIZE to send larger chunks.
 const CHUNK_SIZE: u64 = 1024 * 1024 * 5;
@@ -25,7 +24,7 @@ const MAX_CHUNKS: u64 = 10000;
 ///
 /// The `_temp_dir` is not used but needs to be kept around until the upload is complete. It going out of scope will
 /// delete the temp folder.
-pub async fn upload_file(archive_path: PathBuf, _temp_dir: TempDir, params: &Params) -> Result<()> {
+pub(crate) async fn upload_file(archive: Archive, params: &Params) -> Result<()> {
     // we want to use `from_env` below, so make sure that environment variables are set properly, even if data comes
     // from the command line args
     env::set_var("AWS_ACCESS_KEY_ID", &params.aws_key_id);
@@ -63,7 +62,7 @@ pub async fn upload_file(archive_path: PathBuf, _temp_dir: TempDir, params: &Par
     let upload_id = multipart_upload_res
         .upload_id()
         .ok_or_else(|| anyhow!("upload_id not found"))?; // convert option to error if None
-    let file_size = get_file_size(&archive_path)?;
+    let file_size = get_file_size(&archive.path)?;
     let mut chunk_count = (file_size / CHUNK_SIZE) + 1;
     let mut size_of_last_chunk = file_size % CHUNK_SIZE;
     // if the file size is exactly a multiple of CHUNK_SIZE, we don't need the extra chunk
@@ -90,7 +89,7 @@
         };
         // take the relevant part of the file corresponding to this chunk
         let stream = ByteStream::read_from()
-            .path(&archive_path)
+            .path(&archive.path)
             .offset(chunk_index * CHUNK_SIZE)
             .length(Length::Exact(this_chunk))
             .build()
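Only the first two lines of the chunking arithmetic appear in the hunks above; the in-code comment explains that a file size that is an exact multiple of CHUNK_SIZE must not produce an empty extra chunk, but that branch sits outside the visible diff. A minimal sketch of the arithmetic, with the exact-multiple adjustment inferred from that comment rather than copied from the PR:

```rust
/// CHUNK_SIZE and the first two lines come from the diff; the `if` branch is
/// an inference from the "exactly a multiple of CHUNK_SIZE" comment.
const CHUNK_SIZE: u64 = 1024 * 1024 * 5; // S3 multipart minimum of 5 MiB

fn chunk_layout(file_size: u64) -> (u64, u64) {
    let mut chunk_count = (file_size / CHUNK_SIZE) + 1;
    let mut size_of_last_chunk = file_size % CHUNK_SIZE;
    // an exact multiple of CHUNK_SIZE needs no trailing partial chunk
    if size_of_last_chunk == 0 {
        size_of_last_chunk = CHUNK_SIZE;
        chunk_count -= 1;
    }
    (chunk_count, size_of_last_chunk)
}

fn main() {
    // 10 MiB splits into exactly two full chunks...
    assert_eq!(chunk_layout(CHUNK_SIZE * 2), (2, CHUNK_SIZE));
    // ...while one extra byte adds a third, 1-byte chunk
    assert_eq!(chunk_layout(CHUNK_SIZE * 2 + 1), (3, 1));
}
```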
25 changes: 18 additions & 7 deletions src/backup.rs
@@ -9,18 +9,26 @@ use uuid::Uuid;
 
 use crate::{aws::upload_file, config::Params};
 
+/// A compressed archive of a folder.
+///
+/// The `_temp_dir` is not used but needs to be kept around until the upload is complete. It going out of scope will
+/// delete the temp folder.
+pub(crate) struct Archive {
+    pub(crate) path: PathBuf,
+    _temp_dir: TempDir,
+}
+
 /// Perform a backup of the folder, uploading it to Dropbox once complete.
-pub async fn backup(params: &Params) -> Result<()> {
-    let (archive_path, temp_dir) =
-        compress_folder(&params.folder).with_context(|| anyhow!("compression failed"))?;
-    upload_file(archive_path, temp_dir, params)
+pub(crate) async fn backup(params: &Params) -> Result<()> {
+    let archive = compress_folder(&params.folder).with_context(|| anyhow!("compression failed"))?;
+    upload_file(archive, params)
         .await
         .with_context(|| anyhow!("upload failed"))?;
     Ok(())
 }
 
 /// Compress the folder into a randomly named tar.gz archive in a temp directory
-fn compress_folder(folder: &Path) -> Result<(PathBuf, TempDir)> {
+fn compress_folder(folder: &Path) -> Result<Archive> {
     // create a temp directory, it will be deleted when the ref goes out of scope
     let dir = TempDir::new()?;
     // generate a random filename
@@ -37,6 +45,9 @@ fn compress_folder(folder: &Path) -> Result<(PathBuf, TempDir)> {
     let res = tar.into_inner()?;
     // make sure the gz layer data is written
     res.finish()?;
-    // we return the temp dir reference to keep it around until the file is uploaded
-    Ok((file_path, dir))
+    // we keep temp dir reference to avoid premature deletion
+    Ok(Archive {
+        path: file_path,
+        _temp_dir: dir,
+    })
 }
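The new `Archive` struct leans on Rust's ownership rules: `_temp_dir` is never read, but owning it ties the `TempDir` destructor (which deletes the folder) to the lifetime of the `Archive`, so the temp folder survives until after `upload_file` is done with the path. A self-contained sketch of the pattern, using a stand-in guard type instead of the real `temp_dir::TempDir` (names below are illustrative):

```rust
use std::path::PathBuf;

// Stand-in for temp_dir::TempDir: runs cleanup when dropped.
struct CleanupGuard(&'static str);

impl Drop for CleanupGuard {
    fn drop(&mut self) {
        println!("cleaning up {}", self.0);
    }
}

// Same shape as the PR's Archive: the otherwise-unused field keeps the guard alive.
struct Archive {
    path: PathBuf,
    _guard: CleanupGuard,
}

fn main() {
    let archive = Archive {
        path: PathBuf::from("backup.tar.gz"),
        _guard: CleanupGuard("temp dir"),
    };
    // prints before "cleaning up temp dir": the guard is dropped together
    // with `archive`, only after the path has been used
    println!("uploading {}", archive.path.display());
}
```

Returning one owning struct instead of the previous `(PathBuf, TempDir)` tuple also stops a caller from destructuring and dropping the guard by accident: `let (path, _) = compress_folder(folder)?` would have deleted the folder before the upload even started.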
18 changes: 9 additions & 9 deletions src/config.rs
@@ -64,25 +64,25 @@ struct Cli {
 }
 
 /// Runtime parameters, parsed, validated and ready to be used
-pub struct Params {
+pub(crate) struct Params {
     /// Which folder to backup
-    pub folder: PathBuf,
+    pub(crate) folder: PathBuf,
     /// An optional interval duration in seconds
-    pub interval: Option<u64>,
+    pub(crate) interval: Option<u64>,
     /// The optional name of the archive that will be uploaded to S3 (without extension)
-    pub filename: Option<String>,
+    pub(crate) filename: Option<String>,
     /// The AWS S3 region, defaults to us-east-1
-    pub aws_region: RegionProviderChain,
+    pub(crate) aws_region: RegionProviderChain,
     /// The AWS S3 bucket name
-    pub aws_bucket: String,
+    pub(crate) aws_bucket: String,
     /// The AWS S3 access key ID
-    pub aws_key_id: String,
+    pub(crate) aws_key_id: String,
     /// The AWS S3 access key
-    pub aws_key: String,
+    pub(crate) aws_key: String,
 }
 
 /// Parse the command-line arguments and environment variables into runtime params
-pub async fn parse_config() -> Result<Params> {
+pub(crate) async fn parse_config() -> Result<Params> {
     // Read from the command-line args, and if not present, check environment variables
     let params = Cli::parse();
 
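All nine removed/added pairs in this file are the same mechanical tightening: `pub` becomes `pub(crate)`, so `Params` and `parse_config` remain usable everywhere inside the crate but disappear from the public API. A small illustration of the visibility rule (module and field names invented for the example):

```rust
mod config {
    // Visible anywhere inside this crate, but not to downstream crates.
    pub(crate) struct Params {
        pub(crate) aws_bucket: String,
    }
}

fn main() {
    // Same crate, so construction and field access both compile.
    let params = config::Params {
        aws_bucket: String::from("example-bucket"),
    };
    println!("bucket: {}", params.aws_bucket);
    // From a different crate, `this_crate::config::Params` would not resolve,
    // because pub(crate) items are not exported.
}
```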
2 changes: 1 addition & 1 deletion src/prelude.rs
@@ -3,7 +3,7 @@ use std::fmt;
 
 /// An alias for unwrap when the code has been audited to ensure that the value is not None/Err or when panic
 /// is required.
-pub trait OrPanic<T> {
+pub(crate) trait OrPanic<T> {
     /// An alias for unwrap when the code has been audited to ensure that the value is not None/Err or when panic
     /// is required.
     fn or_panic(self) -> T;
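The diff shows only the visibility change on `OrPanic`; the trait's implementations live outside the hunk. A plausible shape for them, assuming blanket impls over `Option` and `Result` that forward to a panicking unwrap (the impl bodies below are an assumption, not code from this PR):

```rust
use std::fmt;

pub(crate) trait OrPanic<T> {
    fn or_panic(self) -> T;
}

// Assumed impl: panics on None, like unwrap, with an explicit message.
impl<T> OrPanic<T> for Option<T> {
    fn or_panic(self) -> T {
        self.expect("audited: value must not be None")
    }
}

// Assumed impl: panics on Err; the E: Debug bound matches the file's `use std::fmt;`.
impl<T, E: fmt::Debug> OrPanic<T> for Result<T, E> {
    fn or_panic(self) -> T {
        self.expect("audited: value must not be Err")
    }
}

fn main() {
    assert_eq!(Some(5).or_panic(), 5);
    let ok: Result<&str, String> = Ok("fine");
    assert_eq!(ok.or_panic(), "fine");
}
```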