diff --git a/generator/.DevConfigs/f8a7b6c5-d4e3-2f1a-0b9c-8d7e6f5a4b3c.json b/generator/.DevConfigs/f8a7b6c5-d4e3-2f1a-0b9c-8d7e6f5a4b3c.json
new file mode 100644
index 000000000000..bc5e6350ecb8
--- /dev/null
+++ b/generator/.DevConfigs/f8a7b6c5-d4e3-2f1a-0b9c-8d7e6f5a4b3c.json
@@ -0,0 +1,11 @@
+{
+ "services": [
+ {
+ "serviceName": "S3",
+ "type": "patch",
+ "changeLogMessages": [
+ "Added UploadWithResponse and UploadWithResponseAsync methods to ITransferUtility interface"
+ ]
+ }
+ ]
+}
diff --git a/sdk/src/Services/S3/Custom/Transfer/_async/ITransferUtility.async.cs b/sdk/src/Services/S3/Custom/Transfer/_async/ITransferUtility.async.cs
index d67a94b00856..938bebf7653e 100644
--- a/sdk/src/Services/S3/Custom/Transfer/_async/ITransferUtility.async.cs
+++ b/sdk/src/Services/S3/Custom/Transfer/_async/ITransferUtility.async.cs
@@ -168,6 +168,122 @@ public partial interface ITransferUtility : IDisposable
/// </param>
/// <returns>The task object representing the asynchronous operation.</returns>
Task UploadAsync(TransferUtilityUploadRequest request, CancellationToken cancellationToken = default(CancellationToken));
+
+ /// <summary>
+ /// Uploads the specified file and returns response metadata.
+ /// The object key is derived from the file's name.
+ /// Multiple threads are used to read the file and perform multiple uploads in parallel.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="filePath">
+ /// The file path of the file to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
+ /// </param>
+ /// <param name="cancellationToken">
+ /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
+ /// </param>
+ /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ Task<TransferUtilityUploadResponse> UploadWithResponseAsync(string filePath, string bucketName, CancellationToken cancellationToken = default(CancellationToken));
+
+ /// <summary>
+ /// Uploads the specified file and returns response metadata.
+ /// Multiple threads are used to read the file and perform multiple uploads in parallel.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="filePath">
+ /// The file path of the file to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
+ /// </param>
+ /// <param name="key">
+ /// The key under which the Amazon S3 object is stored.
+ /// </param>
+ /// <param name="cancellationToken">
+ /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
+ /// </param>
+ /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ Task<TransferUtilityUploadResponse> UploadWithResponseAsync(string filePath, string bucketName, string key, CancellationToken cancellationToken = default(CancellationToken));
+
+ /// <summary>
+ /// Uploads the contents of the specified stream and returns response metadata.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="stream">
+ /// The stream to read to obtain the content to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the stream to.
+ /// </param>
+ /// <param name="key">
+ /// The key under which the Amazon S3 object is stored.
+ /// </param>
+ /// <param name="cancellationToken">
+ /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
+ /// </param>
+ /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ Task<TransferUtilityUploadResponse> UploadWithResponseAsync(Stream stream, string bucketName, string key, CancellationToken cancellationToken = default(CancellationToken));
+
+ /// <summary>
+ /// Uploads the file or stream specified by the request and returns response metadata.
+ /// To track the progress of the upload,
+ /// add an event listener to the request's UploadProgressEvent.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="request">
+ /// Contains all the parameters required to upload to Amazon S3.
+ /// </param>
+ /// <param name="cancellationToken">
+ /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
+ /// </param>
+ /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ Task<TransferUtilityUploadResponse> UploadWithResponseAsync(TransferUtilityUploadRequest request, CancellationToken cancellationToken = default(CancellationToken));
#endregion
#region AbortMultipartUploads
diff --git a/sdk/src/Services/S3/Custom/Transfer/_async/TransferUtility.async.cs b/sdk/src/Services/S3/Custom/Transfer/_async/TransferUtility.async.cs
index 92307954b039..e1c52c2a6e68 100644
--- a/sdk/src/Services/S3/Custom/Transfer/_async/TransferUtility.async.cs
+++ b/sdk/src/Services/S3/Custom/Transfer/_async/TransferUtility.async.cs
@@ -218,157 +218,28 @@ public partial class TransferUtility : ITransferUtility
}
}
- /// <summary>
- /// Uploads the specified file and returns response metadata.
- /// The object key is derived from the file's name.
- /// Multiple threads are used to read the file and perform multiple uploads in parallel.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
- /// </para>
- /// <para>
- /// For nonseekable streams or streams with an unknown length, TransferUtility will use multipart upload and buffer up to a part size in memory
- /// until the final part is reached and complete the upload. The buffer for the multipart upload is controlled by S3Constants.MinPartSize
- /// and the default value is 5 megabytes. You can also adjust the read buffer size(i.e.how many bytes to read before writing to the part buffer)
- /// via the BufferSize property on the ClientConfig.The default value for this is 8192 bytes.
- /// </para>
- /// </remarks>
- /// <param name="filePath">
- /// The file path of the file to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
- /// </param>
- /// <param name="cancellationToken">
- /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
- /// </param>
- /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ /// <inheritdoc/>
public async Task<TransferUtilityUploadResponse> UploadWithResponseAsync(string filePath, string bucketName, CancellationToken cancellationToken = default(CancellationToken))
{
var request = ConstructUploadRequest(filePath, bucketName);
return await UploadWithResponseAsync(request, cancellationToken).ConfigureAwait(false);
}
- /// <summary>
- /// Uploads the specified file and returns response metadata.
- /// Multiple threads are used to read the file and perform multiple uploads in parallel.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
- /// </para>
- /// <para>
- /// For nonseekable streams or streams with an unknown length, TransferUtility will use multipart upload and buffer up to a part size in memory
- /// until the final part is reached and complete the upload. The buffer for the multipart upload is controlled by S3Constants.MinPartSize
- /// and the default value is 5 megabytes. You can also adjust the read buffer size(i.e.how many bytes to read before writing to the part buffer)
- /// via the BufferSize property on the ClientConfig.The default value for this is 8192 bytes.
- /// </para>
- /// </remarks>
- /// <param name="filePath">
- /// The file path of the file to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
- /// </param>
- /// <param name="key">
- /// The key under which the Amazon S3 object is stored.
- /// </param>
- /// <param name="cancellationToken">
- /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
- /// </param>
- /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ /// <inheritdoc/>
public async Task<TransferUtilityUploadResponse> UploadWithResponseAsync(string filePath, string bucketName, string key, CancellationToken cancellationToken = default(CancellationToken))
{
var request = ConstructUploadRequest(filePath, bucketName, key);
return await UploadWithResponseAsync(request, cancellationToken).ConfigureAwait(false);
}
- /// <summary>
- /// Uploads the contents of the specified stream and returns response metadata.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
- /// </para>
- /// <para>
- /// For nonseekable streams or streams with an unknown length, TransferUtility will use multipart upload and buffer up to a part size in memory
- /// until the final part is reached and complete the upload. The buffer for the multipart upload is controlled by S3Constants.MinPartSize
- /// and the default value is 5 megabytes. You can also adjust the read buffer size(i.e.how many bytes to read before writing to the part buffer)
- /// via the BufferSize property on the ClientConfig.The default value for this is 8192 bytes.
- /// </para>
- /// </remarks>
- /// <param name="stream">
- /// The stream to read to obtain the content to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the stream to.
- /// </param>
- /// <param name="key">
- /// The key under which the Amazon S3 object is stored.
- /// </param>
- /// <param name="cancellationToken">
- /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
- /// </param>
- /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ /// <inheritdoc/>
public async Task<TransferUtilityUploadResponse> UploadWithResponseAsync(Stream stream, string bucketName, string key, CancellationToken cancellationToken = default(CancellationToken))
{
var request = ConstructUploadRequest(stream, bucketName, key);
return await UploadWithResponseAsync(request, cancellationToken).ConfigureAwait(false);
}
- /// <summary>
- /// Uploads the file or stream specified by the request and returns response metadata.
- /// To track the progress of the upload,
- /// add an event listener to the request's UploadProgressEvent.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
- /// </para>
- /// <para>
- /// For nonseekable streams or streams with an unknown length, TransferUtility will use multipart upload and buffer up to a part size in memory
- /// until the final part is reached and complete the upload. The part size buffer for the multipart upload is controlled by the partSize
- /// specified on the TransferUtilityUploadRequest, and if none is specified it defaults to S3Constants.MinPartSize (5 megabytes).
- /// You can also adjust the read buffer size (i.e. how many bytes to read before adding it to the
- /// part buffer) via the BufferSize property on the ClientConfig. The default value for this is 8192 bytes.
- /// </para>
- /// </remarks>
- /// <param name="request">
- /// Contains all the parameters required to upload to Amazon S3.
- /// </param>
- /// <param name="cancellationToken">
- /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
- /// </param>
- /// <returns>The task object representing the asynchronous operation with upload response metadata.</returns>
+ /// <inheritdoc/>
public async Task<TransferUtilityUploadResponse> UploadWithResponseAsync(TransferUtilityUploadRequest request, CancellationToken cancellationToken = default(CancellationToken))
{
using(CreateSpan(nameof(UploadWithResponseAsync), null, Amazon.Runtime.Telemetry.Tracing.SpanKind.CLIENT))
diff --git a/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/ITransferUtility.sync.cs b/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/ITransferUtility.sync.cs
index a492f922a7d2..8444104739e2 100644
--- a/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/ITransferUtility.sync.cs
+++ b/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/ITransferUtility.sync.cs
@@ -318,5 +318,113 @@ public partial interface ITransferUtility
void AbortMultipartUploads(string bucketName, DateTime initiatedDate);
#endregion
+
+ #region UploadWithResponse
+
+ /// <summary>
+ /// Uploads the specified file and returns response metadata.
+ /// The object key is derived from the file's name.
+ /// Multiple threads are used to read the file and perform multiple uploads in parallel.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="filePath">
+ /// The file path of the file to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
+ /// </param>
+ /// <returns>The upload response metadata.</returns>
+ TransferUtilityUploadResponse UploadWithResponse(string filePath, string bucketName);
+
+ /// <summary>
+ /// Uploads the specified file and returns response metadata.
+ /// Multiple threads are used to read the file and perform multiple uploads in parallel.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="filePath">
+ /// The file path of the file to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
+ /// </param>
+ /// <param name="key">
+ /// The key under which the Amazon S3 object is stored.
+ /// </param>
+ /// <returns>The upload response metadata.</returns>
+ TransferUtilityUploadResponse UploadWithResponse(string filePath, string bucketName, string key);
+
+ /// <summary>
+ /// Uploads the contents of the specified stream and returns response metadata.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="stream">
+ /// The stream to read to obtain the content to upload.
+ /// </param>
+ /// <param name="bucketName">
+ /// The target Amazon S3 bucket, that is, the name of the bucket to upload the stream to.
+ /// </param>
+ /// <param name="key">
+ /// The key under which the Amazon S3 object is stored.
+ /// </param>
+ /// <returns>The upload response metadata.</returns>
+ TransferUtilityUploadResponse UploadWithResponse(Stream stream, string bucketName, string key);
+
+ /// <summary>
+ /// Uploads the file or stream specified by the request and returns response metadata.
+ /// To track the progress of the upload,
+ /// add an event listener to the request's UploadProgressEvent.
+ /// For large uploads, the file will be divided and uploaded in parts using
+ /// Amazon S3's multipart API. The parts will be reassembled as one object in
+ /// Amazon S3.
+ /// </summary>
+ /// <remarks>
+ /// <para>
+ /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
+ /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
+ /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
+ /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
+ /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
+ /// </para>
+ /// </remarks>
+ /// <param name="request">
+ /// Contains all the parameters required to upload to Amazon S3.
+ /// </param>
+ /// <returns>The upload response metadata.</returns>
+ TransferUtilityUploadResponse UploadWithResponse(TransferUtilityUploadRequest request);
+
+ #endregion
}
}
diff --git a/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/TransferUtility.sync.cs b/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/TransferUtility.sync.cs
index c4d99745e3da..9a627d30c282 100644
--- a/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/TransferUtility.sync.cs
+++ b/sdk/src/Services/S3/Custom/Transfer/_bcl+netstandard/TransferUtility.sync.cs
@@ -285,30 +285,7 @@ public void Upload(TransferUtilityUploadRequest request)
}
}
- /// <summary>
- /// Uploads the specified file and returns response metadata.
- /// The object key is derived from the file's name.
- /// Multiple threads are used to read the file and perform multiple uploads in parallel.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
- /// </para>
- /// </remarks>
- /// <param name="filePath">
- /// The file path of the file to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
- /// </param>
- /// <returns>The upload response metadata.</returns>
+ /// <inheritdoc/>
public TransferUtilityUploadResponse UploadWithResponse(string filePath, string bucketName)
{
try
@@ -322,32 +299,7 @@ public TransferUtilityUploadResponse UploadWithResponse(string filePath, string
}
}
- /// <summary>
- /// Uploads the specified file and returns response metadata.
- /// Multiple threads are used to read the file and perform multiple uploads in parallel.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
- /// </para>
- /// </remarks>
- /// <param name="filePath">
- /// The file path of the file to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the file to.
- /// </param>
- /// <param name="key">
- /// The key under which the Amazon S3 object is stored.
- /// </param>
- /// <returns>The upload response metadata.</returns>
+ /// <inheritdoc/>
public TransferUtilityUploadResponse UploadWithResponse(string filePath, string bucketName, string key)
{
try
@@ -361,31 +313,7 @@ public TransferUtilityUploadResponse UploadWithResponse(string filePath, string
}
}
- /// <summary>
- /// Uploads the contents of the specified stream and returns response metadata.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
- /// </para>
- /// </remarks>
- /// <param name="stream">
- /// The stream to read to obtain the content to upload.
- /// </param>
- /// <param name="bucketName">
- /// The target Amazon S3 bucket, that is, the name of the bucket to upload the stream to.
- /// </param>
- /// <param name="key">
- /// The key under which the Amazon S3 object is stored.
- /// </param>
- /// <returns>The upload response metadata.</returns>
+ /// <inheritdoc/>
public TransferUtilityUploadResponse UploadWithResponse(Stream stream, string bucketName, string key)
{
try
@@ -399,27 +327,7 @@ public TransferUtilityUploadResponse UploadWithResponse(Stream stream, string bu
}
}
- /// <summary>
- /// Uploads the file or stream specified by the request and returns response metadata.
- /// To track the progress of the upload,
- /// add an event listener to the request's UploadProgressEvent.
- /// For large uploads, the file will be divided and uploaded in parts using
- /// Amazon S3's multipart API. The parts will be reassembled as one object in
- /// Amazon S3.
- /// </summary>
- /// <remarks>
- /// <para>
- /// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
- /// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
- /// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
- /// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
- /// you should manually invoke TransferUtility.AbortMultipartUploads() to abort the incomplete multipart uploads.
- /// </para>
- /// </remarks>
- /// <param name="request">
- /// Contains all the parameters required to upload to Amazon S3.
- /// </param>
- /// <returns>The upload response metadata.</returns>
+ /// <inheritdoc/>
public TransferUtilityUploadResponse UploadWithResponse(TransferUtilityUploadRequest request)
{
try
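
For review context, a minimal usage sketch of the new API surface. This is illustrative only and not part of the patch; it assumes an already-configured AmazonS3Client, and the bucket, key, and file path are placeholders.

using System;
using System.Threading;
using System.Threading.Tasks;
using Amazon.S3;
using Amazon.S3.Transfer;

class UploadWithResponseExample
{
    static async Task Main()
    {
        // Assumes credentials and region come from the default configuration chain.
        var transferUtility = new TransferUtility(new AmazonS3Client());

        // Synchronous variant: same upload behavior as Upload(), but returns response metadata.
        TransferUtilityUploadResponse syncResponse =
            transferUtility.UploadWithResponse(@"C:\data\report.pdf", "amzn-s3-demo-bucket");

        // Asynchronous variant with an explicit object key and a cancellation token.
        using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));
        TransferUtilityUploadResponse asyncResponse = await transferUtility.UploadWithResponseAsync(
            @"C:\data\report.pdf", "amzn-s3-demo-bucket", "reports/report.pdf", cts.Token);
    }
}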