Merge pull request #4 from takkt-ag/prepare-release
Small documentation improvements
ZyanKLee authored Oct 15, 2024
2 parents 74c9be2 + 7bc49f6 commit 23e6242
Showing 4 changed files with 67 additions and 5 deletions.
40 changes: 40 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -13,7 +13,7 @@ license = "Apache-2.0"
anyhow = "1.0.89"
aws-config = "1.5.8"
aws-sdk-s3 = { version = "1.55.0", features = ["http-1x"] }
- clap = { version = "4.5.20", features = ["derive"] }
+ clap = { version = "4.5.20", features = ["derive", "wrap_help"] }
http-body = "1.0.1"
http-body-util = "0.1.2"
serde = { version = "1.0.210", features = ["derive"] }
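For context, `wrap_help` is an optional cargo feature of clap that re-flows `--help` output to the width of the current terminal. A minimal sketch of a derive-based CLI where this matters; the struct and `--bucket` flag below are illustrative, not taken from Persevere:

```rust
// Illustrative only: with clap's `wrap_help` cargo feature enabled, long
// help strings like the one below re-flow to the current terminal width
// when the user runs `--help`, instead of wrapping at a fixed column.
use clap::Parser;

#[derive(Debug, Parser)]
#[command(version, about = "Resumable S3 uploads")]
struct Cli {
    /// The S3 bucket to upload into. This help text is long enough that,
    /// with `wrap_help`, clap re-flows it to fit narrower terminals.
    #[arg(long)]
    bucket: String,
}

fn main() {
    let cli = Cli::parse();
    println!("bucket: {}", cli.bucket);
}
```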
6 changes: 2 additions & 4 deletions src/consts.rs
@@ -42,16 +42,14 @@ pub(crate) const MINIMUM_PART_NUMBER: u64 = 1;
/// Source: https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
pub(crate) const MAXIMUM_PART_NUMBER: u64 = 10_000;

- /// Part size: 5 MiB to 5 GiB.
+ /// Minimum part size: 5 MiB
///
/// There is no minimum size limit on the last part of your multipart upload.
///
/// Source: https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
pub(crate) const MINIMUM_PART_SIZE: u64 = 5 * MiB;

- /// Part size: 5 MiB to 5 GiB.
- ///
- /// There is no minimum size limit on the last part of your multipart upload.
+ /// Maximum part size: 5 GiB
///
/// Source: https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
pub(crate) const MAXIMUM_PART_SIZE: u64 = 5 * GiB;
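Together with the 10,000-part cap defined above, these bounds determine which part sizes can legally upload a file of a given size. A hypothetical helper, not part of this commit, that derives a compliant part size might look like this:

```rust
// Hypothetical sketch, not from the Persevere codebase: derive a part size
// that satisfies the S3 limits documented in the constants above.
const MIB: u64 = 1024 * 1024;
const GIB: u64 = 1024 * MIB;
const MINIMUM_PART_SIZE: u64 = 5 * MIB;
const MAXIMUM_PART_SIZE: u64 = 5 * GIB;
const MAXIMUM_PART_NUMBER: u64 = 10_000;

/// Smallest part size that fits `file_size` into at most 10,000 parts while
/// respecting the 5 MiB minimum. Returns `None` when the file exceeds what
/// S3 can hold (10,000 parts of 5 GiB each). The last part may be smaller.
fn part_size_for(file_size: u64) -> Option<u64> {
    let needed = file_size.div_ceil(MAXIMUM_PART_NUMBER); // round up
    let size = needed.max(MINIMUM_PART_SIZE);
    (size <= MAXIMUM_PART_SIZE).then_some(size)
}

fn main() {
    // A 1 TiB file needs parts of roughly 105 MiB to stay under 10,000 parts.
    println!("{:?}", part_size_for(1024 * GIB));
}
```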
24 changes: 24 additions & 0 deletions src/main.rs
@@ -124,6 +124,15 @@ impl State {
}
}

+ /// With Persevere you can upload huge files to S3 without worrying about network interruptions or
+ /// other issues. Persevere will allow you to resume the upload where it left off, even in the
+ /// case of a system crash during upload.
+ ///
+ /// The contents of the file you upload are always streamed, which means the memory usage of
+ /// Persevere is minimal, usually below 10 MB. This makes it possible to upload files of any size
+ /// supported by S3, even if they are larger than the available memory of your system.
+ ///
+ /// Source: <https://github.com/takkt-ag/persevere>
#[derive(Debug, Parser)]
#[command(version)]
enum Cli {
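The streaming claim in the new doc comment is the key design point: memory use is bounded by the chunk buffer, not by file size. A rough standard-library illustration of the idea, not Persevere's actual upload path:

```rust
// Rough illustration of bounded-memory streaming, not Persevere's code: the
// only sizable allocation is the reusable chunk buffer, so a 500 GiB file
// costs no more memory than a 5 MiB one.
use std::fs::File;
use std::io::{self, Read};

fn for_each_part(path: &str, part_size: usize) -> io::Result<u64> {
    let mut file = File::open(path)?;
    let mut buf = vec![0u8; part_size]; // reused for every part
    let mut total = 0u64;
    loop {
        // Note: read() may return fewer bytes than requested; a real
        // uploader would keep filling the buffer until the part is full.
        let n = file.read(&mut buf)?;
        if n == 0 {
            break; // end of file
        }
        // A real uploader would send buf[..n] as one multipart part here,
        // then persist progress so the upload can be resumed after a crash.
        total += n as u64;
    }
    Ok(total)
}
```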
@@ -141,6 +150,11 @@ enum Cli {
///
/// * `s3:PutObject`
/// * `s3:AbortMultipartUpload`
+ ///
+ /// Persevere will automatically discover valid AWS credentials like most AWS SDKs. This means
+ /// you can provide environment variables such as `AWS_PROFILE` to select the profile you want
+ /// to upload a file with, or provide the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+ /// directly.
Upload(Upload),
/// Resume the upload of a file to S3.
///
@@ -156,6 +170,11 @@
///
/// * `s3:PutObject`
/// * `s3:AbortMultipartUpload`
+ ///
+ /// Persevere will automatically discover valid AWS credentials like most AWS SDKs. This means
+ /// you can provide environment variables such as `AWS_PROFILE` to select the profile you want
+ /// to upload a file with, or provide the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+ /// directly.
Resume(Resume),
/// Abort the upload of a file to S3.
///
@@ -168,6 +187,11 @@
///
/// * `s3:PutObject`
/// * `s3:AbortMultipartUpload`
+ ///
+ /// Persevere will automatically discover valid AWS credentials like most AWS SDKs. This means
+ /// you can provide environment variables such as `AWS_PROFILE` to select the profile you want
+ /// to upload a file with, or provide the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+ /// directly.
Abort(Abort),
}
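The credential-discovery note added to all three subcommands describes the default resolution chain of `aws-config`. A minimal sketch of that chain, assuming a tokio runtime; this is not code taken from the repository:

```rust
// Minimal sketch of default AWS credential discovery via aws-config; this
// mirrors the behavior described in the help text above but is not code
// from this repository. Assumes a tokio runtime.
use aws_config::BehaviorVersion;

#[tokio::main]
async fn main() {
    // Resolves credentials from, among others, AWS_ACCESS_KEY_ID and
    // AWS_SECRET_ACCESS_KEY, AWS_PROFILE plus the shared config files,
    // SSO sessions, and the EC2/ECS instance metadata endpoints.
    let config = aws_config::load_defaults(BehaviorVersion::latest()).await;
    let s3 = aws_sdk_s3::Client::new(&config);
    println!("resolved region: {:?}", config.region());
    let _ = s3; // ready for create_multipart_upload / upload_part calls
}
```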

