diff --git a/Cargo.lock b/Cargo.lock
index 2f95c0f..f486cbf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -418,7 +418,7 @@ dependencies = [
 
 [[package]]
 name = "awsbck"
-version = "0.2.4"
+version = "0.2.5"
 dependencies = [
  "anyhow",
  "aws-config",
diff --git a/Cargo.toml b/Cargo.toml
index 46adb07..83431d8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "awsbck"
-version = "0.2.4"
+version = "0.2.5"
 edition = "2021"
 authors = ["Valentin Bersier "]
 license = "MIT OR Apache-2.0"
diff --git a/README.md b/README.md
index a93a82a..ff21f4c 100644
--- a/README.md
+++ b/README.md
@@ -19,6 +19,7 @@ Arguments:
 Options:
   -i, --interval <SECONDS>  Specify an interval in seconds to run the backup periodically [env: AWSBCK_INTERVAL=]
+  -f, --filename <NAME>     The name of the archive that will be uploaded to S3, without extension (optional) [env: AWSBCK_FILENAME=]
   -r, --region              The AWS S3 region [env: AWS_REGION=]
   -b, --bucket              The AWS S3 bucket name [env: AWS_BUCKET=]
       --id                  The AWS S3 access key ID [env: AWS_ACCESS_KEY_ID=]
@@ -40,6 +41,16 @@ $ awsbck -i 3600 -b my_bucket /my_folder
 
 ## Installation
 
+### Prebuilt binaries
+
+Check out [the releases](https://github.com/beeb/awsbck-rs/releases) for prebuilt binaries.
+
+### Cargo
+
 ```shell
 $ cargo install awsbck
 ```
+
+### Docker
+
+Coming soon
diff --git a/src/aws.rs b/src/aws.rs
index f8dfca0..69da93f 100644
--- a/src/aws.rs
+++ b/src/aws.rs
@@ -24,14 +24,23 @@ pub async fn upload_file(archive_path: PathBuf, _temp_dir: TempDir, params: &Params)
         .load()
         .await;
     let client = Client::new(&shared_config);
-    let filename = format!(
-        "awsbck_{}.tar.gz",
-        params
-            .folder
-            .file_name()
-            .map(|f| f.to_string_lossy().to_string())
-            .unwrap_or("backup".to_string())
-    );
+    let filename = params
+        .filename
+        .clone()
+        .map(|f| match f {
+            f if !f.ends_with(".tar.gz") => format!("{f}.tar.gz"),
+            f => f,
+        })
+        .unwrap_or_else(|| {
+            format!(
+                "awsbck_{}.tar.gz",
+                params
+                    .folder
+                    .file_name()
+                    .map(|f| f.to_string_lossy().to_string())
+                    .unwrap_or("backup".to_string())
+            )
+        });
     let multipart_upload_res: CreateMultipartUploadOutput = client
         .create_multipart_upload()
         .bucket(&params.aws_bucket)
diff --git a/src/config.rs b/src/config.rs
index 547caf9..a164aca 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -21,6 +21,10 @@ struct Cli {
     #[arg(short, long, value_name = "SECONDS", env = "AWSBCK_INTERVAL")]
     interval: Option<u64>,
 
+    /// The name of the archive that will be uploaded to S3, without extension (optional)
+    #[arg(short, long, value_name = "NAME", env = "AWSBCK_FILENAME")]
+    filename: Option<String>,
+
     /// The AWS S3 region
     #[arg(
         short = 'r',
@@ -59,6 +63,8 @@ pub struct Params {
     pub folder: PathBuf,
     /// An optional interval duration in seconds
     pub interval: Option<u64>,
+    /// The name of the archive that will be uploaded to S3 (without extension)
+    pub filename: Option<String>,
     /// The AWS S3 region
     pub aws_region: RegionProviderChain,
     /// The AWS S3 bucket name
@@ -100,6 +106,7 @@ pub async fn parse_config() -> Result<Params> {
     Ok(Params {
         folder,
         interval: params.interval,
+        filename: params.filename,
         aws_region,
         aws_bucket,
         aws_key_id,
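
For reference, here is a standalone sketch of the filename-resolution behavior that the `src/aws.rs` hunk above introduces: a user-supplied `--filename` is used as the archive name (with `.tar.gz` appended if it is missing), otherwise the name is derived from the backed-up folder. The helper `resolve_filename` below is illustrative only and not part of the crate's API.

```rust
use std::path::Path;

/// Hypothetical helper (not part of awsbck): mirrors the new logic in
/// `upload_file` for choosing the name of the uploaded archive.
fn resolve_filename(filename: Option<&str>, folder: &Path) -> String {
    match filename {
        // A user-supplied name gets the `.tar.gz` suffix appended if missing.
        Some(f) if !f.ends_with(".tar.gz") => format!("{f}.tar.gz"),
        Some(f) => f.to_string(),
        // Otherwise, fall back to a name derived from the backed-up folder.
        None => format!(
            "awsbck_{}.tar.gz",
            folder
                .file_name()
                .map(|f| f.to_string_lossy().to_string())
                .unwrap_or_else(|| "backup".to_string())
        ),
    }
}

fn main() {
    assert_eq!(
        resolve_filename(Some("nightly"), Path::new("/my_folder")),
        "nightly.tar.gz"
    );
    assert_eq!(
        resolve_filename(None, Path::new("/my_folder")),
        "awsbck_my_folder.tar.gz"
    );
    println!("filename resolution behaves as expected");
}
```

With this change, an invocation such as `awsbck -b my_bucket -f nightly /my_folder` would upload the archive as `nightly.tar.gz` instead of the folder-derived default.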