From f84a787a3312802ed4fdf87145b16099e1cac9a0 Mon Sep 17 00:00:00 2001 From: Rachel Hagerman <110480692+rlhagerm@users.noreply.github.com> Date: Mon, 9 Dec 2024 09:50:10 -0600 Subject: [PATCH 1/4] Adding new files --- .../S3ConditionalRequestsScenario/README.md | 57 +++ .../S3ConditionalRequests/S3ActionsWrapper.cs | 435 ++++++++++++++++++ .../S3ConditionalRequestsScenario.cs | 432 +++++++++++++++++ .../S3ConditionalRequestsScenario.csproj | 29 ++ .../S3ConditionalRequests/settings.json | 4 + .../S3ConditionalRequestsScenario.sln | 31 ++ .../S3ConditionalRequestsScenarioTests.cs | 108 +++++ .../S3ConditionalRequestsTests.csproj | 40 ++ .../S3ConditionalRequestsTests/Usings.cs | 4 + .../testsettings.json | 4 + 10 files changed, 1144 insertions(+) create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsScenario.sln create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/Usings.cs create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md new file mode 100644 index 00000000000..ec8f5b0cf00 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md @@ -0,0 +1,57 @@ +# Amazon S3 Object Lock Workflow + +## Overview + +This example shows how to use AWS SDKs to work with Amazon Simple Storage Service (Amazon S3) object locking features. The workflow demonstrates how to create, update, view, and modify object locks, as well as how locked objects behave regarding requests to delete and overwrite. + +[Amazon S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) can help prevent Amazon S3 objects from being deleted or overwritten for a fixed amount of time or indefinitely. Object Lock can help meet regulatory requirements or protect against object changes or deletion. + +![Object Lock Features](../../../../workflows/s3_object_lock/resources/Diagram_Amazon-S3-Object-Lock.png) + +This workflow demonstrates the following steps and tasks: +1. Add object lock settings to both new and existing S3 buckets. + 1. Add objects to buckets with optional object lock or retention period settings. +2. Attempt to delete or overwrite locked objects. +3. Retrieve and view the object lock and retention period settings of buckets and objects. +4. Delete the objects and buckets. + 1. Remove any object locks and use the BypassGovernanceRetention setting. + +## ⚠ Important + +* Running this code might result in charges to your AWS account. 
+* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + +## Scenario + +### Prerequisites + +For general prerequisites, see the [README](../../../README.md) in the `dotnetv3` folder. + +### Resources + +The workflow scenario steps create the buckets and objects needed for the example. No additional resources are required. + +This workflow includes an optional step to add a governance mode retention period of one day to objects in an S3 bucket. In order to delete these objects, you must have the `s3:BypassGovernanceRetention` permission. If you do not have this permission, you will be unable to delete these objects until the retention period has expired. + +### Instructions + +After the example compiles, you can run it from the command line. To do so, navigate to +the folder that contains the .sln file and run the following command: + +``` +dotnet run +``` + +Alternatively, you can run the example from within your IDE. + +This starts an interactive scenario that walks you through creating, exploring, and deleting S3 buckets and objects with various object lock settings. + +## Additional resources + +- [S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs new file mode 100644 index 00000000000..c875a92c353 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs @@ -0,0 +1,435 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[S3LockWorkflow.dotnetv3.S3ActionsWrapper] + +using System.Net; +using Amazon.S3; +using Amazon.S3.Model; +using Microsoft.Extensions.Configuration; + +namespace S3ObjectLockScenario; + +/// +/// Encapsulate the Amazon S3 operations. +/// +public class S3ActionsWrapper +{ + private readonly IAmazonS3 _amazonS3; + + /// + /// Constructor for the S3ActionsWrapper. + /// + /// The injected S3 client. + public S3ActionsWrapper(IAmazonS3 amazonS3, IConfiguration configuration) + { + _amazonS3 = amazonS3; + } + + // snippet-start:[S3LockWorkflow.dotnetv3.CreateBucketWithObjectLock] + /// + /// Create a new Amazon S3 bucket with object lock actions. + /// + /// The name of the bucket to create. + /// True to enable object lock on the bucket. + /// True if successful. 
+ public async Task CreateBucketWithObjectLock(string bucketName, bool enableObjectLock) + { + Console.WriteLine($"\tCreating bucket {bucketName} with object lock {enableObjectLock}."); + try + { + var request = new PutBucketRequest + { + BucketName = bucketName, + UseClientRegion = true, + ObjectLockEnabledForBucket = enableObjectLock, + }; + + var response = await _amazonS3.PutBucketAsync(request); + + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"Error creating bucket: '{ex.Message}'"); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.CreateBucketWithObjectLock] + + // snippet-start:[S3LockWorkflow.dotnetv3.EnableObjectLockOnBucket] + /// + /// Enable object lock on an existing bucket. + /// + /// The name of the bucket to modify. + /// True if successful. + public async Task EnableObjectLockOnBucket(string bucketName) + { + try + { + // First, enable Versioning on the bucket. + await _amazonS3.PutBucketVersioningAsync(new PutBucketVersioningRequest() + { + BucketName = bucketName, + VersioningConfig = new S3BucketVersioningConfig() + { + EnableMfaDelete = false, + Status = VersionStatus.Enabled + } + }); + + var request = new PutObjectLockConfigurationRequest() + { + BucketName = bucketName, + ObjectLockConfiguration = new ObjectLockConfiguration() + { + ObjectLockEnabled = new ObjectLockEnabled("Enabled"), + }, + }; + + var response = await _amazonS3.PutObjectLockConfigurationAsync(request); + Console.WriteLine($"\tAdded an object lock policy to bucket {bucketName}."); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"Error modifying object lock: '{ex.Message}'"); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.EnableObjectLockOnBucket] + + // snippet-start:[S3LockWorkflow.dotnetv3.ModifyObjectRetentionPeriod] + /// + /// Set or modify a retention period on an object in an S3 bucket. + /// + /// The bucket of the object. + /// The key of the object. + /// The retention mode. + /// The date retention expires. + /// True if successful. + public async Task ModifyObjectRetentionPeriod(string bucketName, + string objectKey, ObjectLockRetentionMode retention, DateTime retainUntilDate) + { + try + { + var request = new PutObjectRetentionRequest() + { + BucketName = bucketName, + Key = objectKey, + Retention = new ObjectLockRetention() + { + Mode = retention, + RetainUntilDate = retainUntilDate + } + }; + + var response = await _amazonS3.PutObjectRetentionAsync(request); + Console.WriteLine($"\tSet retention for {objectKey} in {bucketName} until {retainUntilDate:d}."); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tError modifying retention period: '{ex.Message}'"); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.ModifyObjectRetentionPeriod] + + // snippet-start:[S3LockWorkflow.dotnetv3.ModifyBucketDefaultRetention] + /// + /// Set or modify a retention period on an S3 bucket. + /// + /// The bucket to modify. + /// The retention mode. + /// The date for retention until. + /// True if successful. + public async Task ModifyBucketDefaultRetention(string bucketName, bool enableObjectLock, ObjectLockRetentionMode retention, DateTime retainUntilDate) + { + var enabledString = enableObjectLock ? 
"Enabled" : "Disabled"; + var timeDifference = retainUntilDate.Subtract(DateTime.Now); + try + { + // First, enable Versioning on the bucket. + await _amazonS3.PutBucketVersioningAsync(new PutBucketVersioningRequest() + { + BucketName = bucketName, + VersioningConfig = new S3BucketVersioningConfig() + { + EnableMfaDelete = false, + Status = VersionStatus.Enabled + } + }); + + var request = new PutObjectLockConfigurationRequest() + { + BucketName = bucketName, + ObjectLockConfiguration = new ObjectLockConfiguration() + { + ObjectLockEnabled = new ObjectLockEnabled(enabledString), + Rule = new ObjectLockRule() + { + DefaultRetention = new DefaultRetention() + { + Mode = retention, + Days = timeDifference.Days // Can be specified in days or years but not both. + } + } + } + }; + + var response = await _amazonS3.PutObjectLockConfigurationAsync(request); + Console.WriteLine($"\tAdded a default retention to bucket {bucketName}."); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tError modifying object lock: '{ex.Message}'"); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.ModifyBucketDefaultRetention] + + // snippet-start:[S3LockWorkflow.dotnetv3.GetObjectRetention] + /// + /// Get the retention period for an S3 object. + /// + /// The bucket of the object. + /// The object key. + /// The object retention details. + public async Task GetObjectRetention(string bucketName, + string objectKey) + { + try + { + var request = new GetObjectRetentionRequest() + { + BucketName = bucketName, + Key = objectKey + }; + + var response = await _amazonS3.GetObjectRetentionAsync(request); + Console.WriteLine($"\tObject retention for {objectKey} in {bucketName}: " + + $"\n\t{response.Retention.Mode} until {response.Retention.RetainUntilDate:d}."); + return response.Retention; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tUnable to fetch object lock retention: '{ex.Message}'"); + return new ObjectLockRetention(); + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.GetObjectRetention] + + // snippet-start:[S3LockWorkflow.dotnetv3.ModifyObjectLegalHold] + /// + /// Set or modify a legal hold on an object in an S3 bucket. + /// + /// The bucket of the object. + /// The key of the object. + /// The On or Off status for the legal hold. + /// True if successful. + public async Task ModifyObjectLegalHold(string bucketName, + string objectKey, ObjectLockLegalHoldStatus holdStatus) + { + try + { + var request = new PutObjectLegalHoldRequest() + { + BucketName = bucketName, + Key = objectKey, + LegalHold = new ObjectLockLegalHold() + { + Status = holdStatus + } + }; + + var response = await _amazonS3.PutObjectLegalHoldAsync(request); + Console.WriteLine($"\tModified legal hold for {objectKey} in {bucketName}."); + return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tError modifying legal hold: '{ex.Message}'"); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.ModifyObjectLegalHold] + + // snippet-start:[S3LockWorkflow.dotnetv3.GetObjectLegalHold] + /// + /// Get the legal hold details for an S3 object. + /// + /// The bucket of the object. + /// The object key. + /// The object legal hold details. 
+ public async Task GetObjectLegalHold(string bucketName, + string objectKey) + { + try + { + var request = new GetObjectLegalHoldRequest() + { + BucketName = bucketName, + Key = objectKey + }; + + var response = await _amazonS3.GetObjectLegalHoldAsync(request); + Console.WriteLine($"\tObject legal hold for {objectKey} in {bucketName}: " + + $"\n\tStatus: {response.LegalHold.Status}"); + return response.LegalHold; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tUnable to fetch legal hold: '{ex.Message}'"); + return new ObjectLockLegalHold(); + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.GetObjectLegalHold] + + // snippet-start:[S3LockWorkflow.dotnetv3.GetBucketObjectLockConfiguration] + /// + /// Get the object lock configuration details for an S3 bucket. + /// + /// The bucket to get details. + /// The bucket's object lock configuration details. + public async Task GetBucketObjectLockConfiguration(string bucketName) + { + try + { + var request = new GetObjectLockConfigurationRequest() + { + BucketName = bucketName + }; + + var response = await _amazonS3.GetObjectLockConfigurationAsync(request); + Console.WriteLine($"\tBucket object lock config for {bucketName} in {bucketName}: " + + $"\n\tEnabled: {response.ObjectLockConfiguration.ObjectLockEnabled}" + + $"\n\tRule: {response.ObjectLockConfiguration.Rule?.DefaultRetention}"); + + return response.ObjectLockConfiguration; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tUnable to fetch object lock config: '{ex.Message}'"); + return new ObjectLockConfiguration(); + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.GetBucketObjectLockConfiguration] + + // snippet-start:[S3LockWorkflow.dotnetv3.UploadFileAsync] + /// + /// Upload a file from the local computer to an Amazon S3 bucket. + /// + /// The Amazon S3 bucket to use. + /// The object to upload. + /// The path, including file name, of the object to upload. + /// True if success. + public async Task UploadFileAsync(string bucketName, string objectName, string filePath) + { + var request = new PutObjectRequest + { + BucketName = bucketName, + Key = objectName, + FilePath = filePath, + ChecksumAlgorithm = ChecksumAlgorithm.SHA256 + }; + + var response = await _amazonS3.PutObjectAsync(request); + if (response.HttpStatusCode == System.Net.HttpStatusCode.OK) + { + Console.WriteLine($"\tSuccessfully uploaded {objectName} to {bucketName}."); + return true; + } + else + { + Console.WriteLine($"\tCould not upload {objectName} to {bucketName}."); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.UploadFileAsync] + + // snippet-start:[S3LockWorkflow.dotnetv3.ListBucketObjectsAndVersions] + /// + /// List bucket objects and versions. + /// + /// The Amazon S3 bucket to use. + /// The list of objects and versions. + public async Task ListBucketObjectsAndVersions(string bucketName) + { + var request = new ListVersionsRequest() + { + BucketName = bucketName + }; + + var response = await _amazonS3.ListVersionsAsync(request); + return response; + } + // snippet-end:[S3LockWorkflow.dotnetv3.ListBucketObjectsAndVersions] + + // snippet-start:[S3LockWorkflow.dotnetv3.DeleteObjectFromBucket] + /// + /// Delete an object from a specific bucket. + /// + /// The Amazon S3 bucket to use. + /// The key of the object to delete. + /// True if the object has retention settings. + /// Optional versionId. + /// True if successful. + public async Task DeleteObjectFromBucket(string bucketName, string objectKey, bool hasRetention, string? 
versionId = null) + { + try + { + var request = new DeleteObjectRequest() + { + BucketName = bucketName, + Key = objectKey, + VersionId = versionId, + }; + if (hasRetention) + { + // Set the BypassGovernanceRetention header + // if the file has retention settings. + request.BypassGovernanceRetention = true; + } + await _amazonS3.DeleteObjectAsync(request); + Console.WriteLine( + $"Deleted {objectKey} in {bucketName}."); + return true; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tUnable to delete object {objectKey} in bucket {bucketName}: " + ex.Message); + return false; + } + } + // snippet-end:[S3LockWorkflow.dotnetv3.DeleteObjectFromBucket] + + // snippet-start:[S3LockWorkflow.dotnetv3.DeleteBucketByName] + /// + /// Delete a specific bucket. + /// + /// The Amazon S3 bucket to use. + /// The key of the object to delete. + /// Optional versionId. + /// True if successful. + public async Task DeleteBucketByName(string bucketName) + { + try + { + var request = new DeleteBucketRequest() { BucketName = bucketName, }; + var response = await _amazonS3.DeleteBucketAsync(request); + Console.WriteLine($"\tDelete for {bucketName} complete."); + return response.HttpStatusCode == HttpStatusCode.OK; + } + catch (AmazonS3Exception ex) + { + Console.WriteLine($"\tUnable to delete bucket {bucketName}: " + ex.Message); + return false; + } + + } + // snippet-end:[S3LockWorkflow.dotnetv3.DeleteBucketByName] + +} +// snippet-end:[S3LockWorkflow.dotnetv3.S3ActionsWrapper] \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs new file mode 100644 index 00000000000..f401d091e66 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs @@ -0,0 +1,432 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[S3LockWorkflow.dotnetv3.ObjectLockWorkflow] + +using Amazon.S3; +using Amazon.S3.Model; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Console; +using Microsoft.Extensions.Logging.Debug; + +namespace S3ObjectLockScenario; + +public static class S3ConditionalRequestsScenario +{ + /* + Before running this .NET code example, set up your development environment, including your credentials. + + This .NET example performs the following tasks: + 1. Create test Amazon Simple Storage Service (S3) buckets with different lock policies. + 2. Upload sample objects to each bucket. + 3. Set some Legal Hold and Retention Periods on objects and buckets. + 4. Investigate lock policies by viewing settings or attempting to delete or overwrite objects. + 5. Clean up objects and buckets. 
+ */ + + public static S3ActionsWrapper _s3ActionsWrapper = null!; + public static IConfiguration _configuration = null!; + private static string _resourcePrefix = null!; + private static string noLockBucketName = null!; + private static string lockEnabledBucketName = null!; + private static string retentionAfterCreationBucketName = null!; + private static List bucketNames = new List(); + private static List fileNames = new List(); + + public static async Task Main(string[] args) + { + // Set up dependency injection for the Amazon service. + using var host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => + logging.AddFilter("System", LogLevel.Debug) + .AddFilter("Microsoft", LogLevel.Information) + .AddFilter("Microsoft", LogLevel.Trace)) + .ConfigureServices((_, services) => + services.AddAWSService() + .AddTransient() + ) + .Build(); + + _configuration = new ConfigurationBuilder() + .SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("settings.json") // Load settings from .json file. + .AddJsonFile("settings.local.json", + true) // Optionally, load local settings. + .Build(); + + ConfigurationSetup(); + + ServicesSetup(host); + + try + { + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Object Locking Workflow Scenario."); + Console.WriteLine(new string('-', 80)); + await Setup(true); + + await DemoActionChoices(); + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Cleaning up resources."); + Console.WriteLine(new string('-', 80)); + await Cleanup(true); + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Amazon S3 Object Locking Workflow is complete."); + Console.WriteLine(new string('-', 80)); + } + catch (Exception ex) + { + Console.WriteLine(new string('-', 80)); + Console.WriteLine($"There was a problem: {ex.Message}"); + await Cleanup(true); + Console.WriteLine(new string('-', 80)); + } + } + + /// + /// Populate the services for use within the console application. + /// + /// The services host. + private static void ServicesSetup(IHost host) + { + _s3ActionsWrapper = host.Services.GetRequiredService(); + } + + /// + /// Any setup operations needed. + /// + public static void ConfigurationSetup() + { + _resourcePrefix = _configuration["resourcePrefix"] ?? "dotnet-example"; + + noLockBucketName = _resourcePrefix + "-no-lock"; + lockEnabledBucketName = _resourcePrefix + "-lock-enabled"; + retentionAfterCreationBucketName = _resourcePrefix + "-retention-after-creation"; + + bucketNames.Add(noLockBucketName); + bucketNames.Add(lockEnabledBucketName); + bucketNames.Add(retentionAfterCreationBucketName); + } + + // + /// Deploy necessary resources for the scenario. + /// + /// True to run as interactive. + /// True if successful. 
+ public static async Task Setup(bool interactive) + { + Console.WriteLine( + "\nFor this workflow, we will use the AWS SDK for .NET to create several S3\n" + + "buckets and files to demonstrate working with S3 locking features.\n"); + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Press Enter when you are ready to start."); + if (interactive) + Console.ReadLine(); + + Console.WriteLine("\nS3 buckets can be created either with or without object lock enabled."); + await _s3ActionsWrapper.CreateBucketWithObjectLock(noLockBucketName, false); + await _s3ActionsWrapper.CreateBucketWithObjectLock(lockEnabledBucketName, true); + await _s3ActionsWrapper.CreateBucketWithObjectLock(retentionAfterCreationBucketName, false); + + Console.WriteLine("Press Enter to continue."); + if (interactive) + Console.ReadLine(); + + Console.WriteLine("\nA bucket can be configured to use object locking with a default retention period."); + await _s3ActionsWrapper.ModifyBucketDefaultRetention(retentionAfterCreationBucketName, true, + ObjectLockRetentionMode.Governance, DateTime.UtcNow.AddDays(1)); + + Console.WriteLine("Press Enter to continue."); + if (interactive) + Console.ReadLine(); + + Console.WriteLine("\nObject lock policies can also be added to existing buckets."); + await _s3ActionsWrapper.EnableObjectLockOnBucket(lockEnabledBucketName); + + Console.WriteLine("Press Enter to continue."); + if (interactive) + Console.ReadLine(); + + // Upload some files to the buckets. + Console.WriteLine("\nNow let's add some test files:"); + var fileName = _configuration["exampleFileName"] ?? "exampleFile.txt"; + int fileCount = 2; + // Create the file if it does not already exist. + if (!File.Exists(fileName)) + { + await using StreamWriter sw = File.CreateText(fileName); + await sw.WriteLineAsync( + "This is a sample file for uploading to a bucket."); + } + + foreach (var bucketName in bucketNames) + { + for (int i = 0; i < fileCount; i++) + { + var numberedFileName = Path.GetFileNameWithoutExtension(fileName) + i + Path.GetExtension(fileName); + fileNames.Add(numberedFileName); + await _s3ActionsWrapper.UploadFileAsync(bucketName, numberedFileName, fileName); + } + } + Console.WriteLine("Press Enter to continue."); + if (interactive) + Console.ReadLine(); + + if (!interactive) + return true; + Console.WriteLine("\nNow we can set some object lock policies on individual files:"); + foreach (var bucketName in bucketNames) + { + for (int i = 0; i < fileNames.Count; i++) + { + // No modifications to the objects in the first bucket. + if (bucketName != bucketNames[0]) + { + var exampleFileName = fileNames[i]; + switch (i) + { + case 0: + { + var question = + $"\nWould you like to add a legal hold to {exampleFileName} in {bucketName}? (y/n)"; + if (GetYesNoResponse(question)) + { + // Set a legal hold. + await _s3ActionsWrapper.ModifyObjectLegalHold(bucketName, exampleFileName, ObjectLockLegalHoldStatus.On); + + } + break; + } + case 1: + { + var question = + $"\nWould you like to add a 1 day Governance retention period to {exampleFileName} in {bucketName}? (y/n)" + + "\nReminder: Only a user with the s3:BypassGovernanceRetention permission will be able to delete this file or its bucket until the retention period has expired."; + if (GetYesNoResponse(question)) + { + // Set a Governance mode retention period for 1 day. 
+ await _s3ActionsWrapper.ModifyObjectRetentionPeriod( + bucketName, exampleFileName, + ObjectLockRetentionMode.Governance, + DateTime.UtcNow.AddDays(1)); + } + break; + } + } + } + } + } + Console.WriteLine(new string('-', 80)); + return true; + } + + // + /// List all of the current buckets and objects. + /// + /// True to run as interactive. + /// The list of buckets and objects. + public static async Task> ListBucketsAndObjects(bool interactive) + { + var allObjects = new List(); + foreach (var bucketName in bucketNames) + { + var objectsInBucket = await _s3ActionsWrapper.ListBucketObjectsAndVersions(bucketName); + foreach (var objectKey in objectsInBucket.Versions) + { + allObjects.Add(objectKey); + } + } + + if (interactive) + { + Console.WriteLine("\nCurrent buckets and objects:\n"); + int i = 0; + foreach (var bucketObject in allObjects) + { + i++; + Console.WriteLine( + $"{i}: {bucketObject.Key} \n\tBucket: {bucketObject.BucketName}\n\tVersion: {bucketObject.VersionId}"); + } + } + + return allObjects; + } + + /// + /// Present the user with the demo action choices. + /// + /// Async task. + public static async Task DemoActionChoices() + { + var choices = new string[]{ + "List all files in buckets.", + "Attempt to delete a file.", + "Attempt to delete a file with retention period bypass.", + "Attempt to overwrite a file.", + "View the object and bucket retention settings for a file.", + "View the legal hold settings for a file.", + "Finish the workflow."}; + + var choice = 0; + // Keep asking the user until they choose to move on. + while (choice != 6) + { + Console.WriteLine(new string('-', 80)); + choice = GetChoiceResponse( + "\nExplore the S3 locking features by selecting one of the following choices:" + , choices); + Console.WriteLine(new string('-', 80)); + switch (choice) + { + case 0: + { + await ListBucketsAndObjects(true); + break; + } + case 1: + { + Console.WriteLine("\nEnter the number of the object to delete:"); + var allFiles = await ListBucketsAndObjects(true); + var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); + await _s3ActionsWrapper.DeleteObjectFromBucket(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, false, allFiles[fileChoice].VersionId); + break; + } + case 2: + { + Console.WriteLine("\nEnter the number of the object to delete:"); + var allFiles = await ListBucketsAndObjects(true); + var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); + await _s3ActionsWrapper.DeleteObjectFromBucket(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, true, allFiles[fileChoice].VersionId); + break; + } + case 3: + { + var allFiles = await ListBucketsAndObjects(true); + Console.WriteLine("\nEnter the number of the object to overwrite:"); + var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); + // Create the file if it does not already exist. 
+ if (!File.Exists(allFiles[fileChoice].Key)) + { + await using StreamWriter sw = File.CreateText(allFiles[fileChoice].Key); + await sw.WriteLineAsync( + "This is a sample file for uploading to a bucket."); + } + await _s3ActionsWrapper.UploadFileAsync(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, allFiles[fileChoice].Key); + break; + } + case 4: + { + var allFiles = await ListBucketsAndObjects(true); + Console.WriteLine("\nEnter the number of the object and bucket to view:"); + var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); + await _s3ActionsWrapper.GetObjectRetention(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key); + await _s3ActionsWrapper.GetBucketObjectLockConfiguration(allFiles[fileChoice].BucketName); + break; + } + case 5: + { + var allFiles = await ListBucketsAndObjects(true); + Console.WriteLine("\nEnter the number of the object to view:"); + var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); + await _s3ActionsWrapper.GetObjectLegalHold(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key); + break; + } + } + } + return true; + } + + // + /// Clean up the resources from the scenario. + /// + /// True to run as interactive. + /// True if successful. + public static async Task Cleanup(bool interactive) + { + Console.WriteLine(new string('-', 80)); + + if (!interactive || GetYesNoResponse("Do you want to clean up all files and buckets? (y/n) ")) + { + // Remove all locks and delete all buckets and objects. + var allFiles = await ListBucketsAndObjects(false); + foreach (var fileInfo in allFiles) + { + // Check for a legal hold. + var legalHold = await _s3ActionsWrapper.GetObjectLegalHold(fileInfo.BucketName, fileInfo.Key); + if (legalHold?.Status?.Value == ObjectLockLegalHoldStatus.On) + { + await _s3ActionsWrapper.ModifyObjectLegalHold(fileInfo.BucketName, fileInfo.Key, ObjectLockLegalHoldStatus.Off); + } + + // Check for a retention period. + var retention = await _s3ActionsWrapper.GetObjectRetention(fileInfo.BucketName, fileInfo.Key); + var hasRetentionPeriod = retention?.Mode == ObjectLockRetentionMode.Governance && retention.RetainUntilDate > DateTime.UtcNow.Date; + await _s3ActionsWrapper.DeleteObjectFromBucket(fileInfo.BucketName, fileInfo.Key, hasRetentionPeriod, fileInfo.VersionId); + } + + foreach (var bucketName in bucketNames) + { + await _s3ActionsWrapper.DeleteBucketByName(bucketName); + } + + } + else + { + Console.WriteLine( + "Ok, we'll leave the resources intact.\n" + + "Don't forget to delete them when you're done with them or you might incur unexpected charges." + ); + } + + Console.WriteLine(new string('-', 80)); + return true; + } + + /// + /// Helper method to get a yes or no response from the user. + /// + /// The question string to print on the console. + /// True if the user responds with a yes. + private static bool GetYesNoResponse(string question) + { + Console.WriteLine(question); + var ynResponse = Console.ReadLine(); + var response = ynResponse != null && ynResponse.Equals("y", StringComparison.InvariantCultureIgnoreCase); + return response; + } + + /// + /// Helper method to get a choice response from the user. + /// + /// The question string to print on the console. + /// The choices to print on the console. + /// The index of the selected choice + private static int GetChoiceResponse(string? 
question, string[] choices) + { + if (question != null) + { + Console.WriteLine(question); + + for (int i = 0; i < choices.Length; i++) + { + Console.WriteLine($"\t{i + 1}. {choices[i]}"); + } + } + + var choiceNumber = 0; + while (choiceNumber < 1 || choiceNumber > choices.Length) + { + var choice = Console.ReadLine(); + Int32.TryParse(choice, out choiceNumber); + } + + return choiceNumber - 1; + } +} +// snippet-end:[S3LockWorkflow.dotnetv3.ObjectLockWorkflow] \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj new file mode 100644 index 00000000000..4eb4d3dc299 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj @@ -0,0 +1,29 @@ + + + + Exe + net6.0 + enable + enable + + + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + settings.json + + + + diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json new file mode 100644 index 00000000000..872c2a7f9a7 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json @@ -0,0 +1,4 @@ +{ + "resourcePrefix": "dotnet-s3-lock-example", + "exampleFileName": "dotnet-example-file.txt" +} diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsScenario.sln b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsScenario.sln new file mode 100644 index 00000000000..6a61a441bc9 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsScenario.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.7.34221.43 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "S3ConditionalRequestsScenario", "S3ConditionalRequests\S3ConditionalRequestsScenario.csproj", "{DE58C919-2A2E-4768-AC07-5A5C7E34CE53}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "S3ConditionalRequestsTests", "S3ConditionalRequestsTests\S3ConditionalRequestsTests.csproj", "{E1F6B013-7462-4C31-8103-EA359ECCC653}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {DE58C919-2A2E-4768-AC07-5A5C7E34CE53}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DE58C919-2A2E-4768-AC07-5A5C7E34CE53}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DE58C919-2A2E-4768-AC07-5A5C7E34CE53}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DE58C919-2A2E-4768-AC07-5A5C7E34CE53}.Release|Any CPU.Build.0 = Release|Any CPU + {E1F6B013-7462-4C31-8103-EA359ECCC653}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E1F6B013-7462-4C31-8103-EA359ECCC653}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E1F6B013-7462-4C31-8103-EA359ECCC653}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E1F6B013-7462-4C31-8103-EA359ECCC653}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = 
{E2318334-46A6-4814-892D-EE6AE0307BEC} + EndGlobalSection +EndGlobal diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs new file mode 100644 index 00000000000..04a65ae6b34 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs @@ -0,0 +1,108 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +using Amazon.S3; +using Microsoft.Extensions.Configuration; +using S3ObjectLockScenario; +using Xunit.Extensions.Ordering; + +namespace S3ObjectLockTests; + +/// +/// Tests for the ObjectLockScenario example. +/// +public class S3ConditionalRequestsScenarioTests +{ + private readonly IConfiguration _configuration; + + private readonly S3ActionsWrapper _s3ActionsWrapper = null!; + private readonly string _resourcePrefix; + + /// + /// Constructor for the test class. + /// + public S3ConditionalRequestsScenarioTests() + { + _configuration = new ConfigurationBuilder() + .SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("testsettings.json") // Load test settings from .json file. + .AddJsonFile("testsettings.local.json", + true) // Optionally, load local settings. + .Build(); + + _resourcePrefix = _configuration["resourcePrefix"] ?? "dotnet-example"; + + _s3ActionsWrapper = new S3ActionsWrapper( + new AmazonS3Client(), + _configuration); + + S3ObjectLockWorkflow._s3ActionsWrapper = _s3ActionsWrapper; + S3ObjectLockWorkflow._configuration = _configuration; + } + + /// + /// Run the setup step of the workflow. Should return successful. + /// + /// Async task. + [Fact] + [Order(1)] + [Trait("Category", "Integration")] + public async Task TestSetup() + { + // Arrange. + S3ObjectLockWorkflow.ConfigurationSetup(); + + // Act. + var success = await S3ObjectLockWorkflow.Setup(false); + + var finished = false; + while (!finished) + { + // Make sure the buckets are available before moving on. + var created = await S3ObjectLockWorkflow.ListBucketsAndObjects(false); + finished = created.Count > 0; + } + + // Assert. + Assert.True(success); + } + + /// + /// Run the list object step of the workflow. Should return successful. + /// + /// Async task. + [Fact] + [Order(2)] + [Trait("Category", "Integration")] + public async Task TestObjects() + { + // Arrange. + S3ObjectLockWorkflow.ConfigurationSetup(); + + // Act. + var objects = await S3ObjectLockWorkflow.ListBucketsAndObjects(false); + + // Assert. + Assert.NotEmpty(objects); + } + + + /// + /// Run the cleanup step of the workflow. Should return successful. + /// + /// Async task. + [Fact] + [Order(3)] + [Trait("Category", "Integration")] + public async Task TestCleanup() + { + // Arrange. + S3ObjectLockWorkflow.ConfigurationSetup(); + + // Act. + var success = await S3ObjectLockWorkflow.Cleanup(false); + + // Assert. 
+ Assert.True(success); + } +} \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj new file mode 100644 index 00000000000..5cd1fc94523 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj @@ -0,0 +1,40 @@ + + + + net6.0 + enable + enable + + false + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + PreserveNewest + + + PreserveNewest + testsettings.json + + + + + + + + diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/Usings.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/Usings.cs new file mode 100644 index 00000000000..47af9ec2f2c --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/Usings.cs @@ -0,0 +1,4 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +global using Xunit; \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json new file mode 100644 index 00000000000..872c2a7f9a7 --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json @@ -0,0 +1,4 @@ +{ + "resourcePrefix": "dotnet-s3-lock-example", + "exampleFileName": "dotnet-example-file.txt" +} From fcb5ba8e747f428220b69ae5f2073a017e0562e2 Mon Sep 17 00:00:00 2001 From: Rachel Hagerman <110480692+rlhagerm@users.noreply.github.com> Date: Mon, 23 Dec 2024 13:14:22 -0600 Subject: [PATCH 2/4] Updating scenario files. --- .doc_gen/metadata/s3_metadata.yaml | 24 + dotnetv3/S3/README.md | 15 +- .../S3ConditionalRequests/Enums.cs | 12 + .../S3ConditionalRequests/S3ActionsWrapper.cs | 494 ++++++++---------- .../S3ConditionalRequestsScenario.cs | 433 +++++++-------- .../S3ConditionalRequestsScenario.csproj | 15 +- .../S3ConditionalRequests/settings.json | 3 +- .../S3ConditionalRequestsScenarioTests.cs | 93 ++-- .../S3ConditionalRequestsTests.csproj | 18 +- .../testsettings.json | 3 +- 10 files changed, 510 insertions(+), 600 deletions(-) create mode 100644 dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/Enums.cs diff --git a/.doc_gen/metadata/s3_metadata.yaml b/.doc_gen/metadata/s3_metadata.yaml index ac313b8cf5b..eb7cba7f897 100644 --- a/.doc_gen/metadata/s3_metadata.yaml +++ b/.doc_gen/metadata/s3_metadata.yaml @@ -236,6 +236,10 @@ s3_CopyObject: - description: snippet_tags: - S3.dotnet35.CopyObject + - description: Copy an object using a conditional request. + genai: some + snippet_tags: + - S3ConditionalRequests.dotnetv3.CopyObjectConditional C++: versions: - sdk_version: 1 @@ -854,6 +858,10 @@ s3_GetObject: - description: snippet_tags: - S3.dotnetv3.S3_Basics-DownloadObject + - description: Get an object using a conditional request. + genai: some + snippet_tags: + - S3ConditionalRequests.dotnetv3.GetObjectConditional C++: versions: - sdk_version: 1 @@ -1491,6 +1499,10 @@ s3_PutObject: - description: Upload an object with server-side encryption. 
snippet_tags: - S3.dotnetv3.ServerSideEncryptionExample + - description: Put an object using a conditional request. + genai: some + snippet_tags: + - S3ConditionalRequests.dotnetv3.PutObjectConditional C++: versions: - sdk_version: 1 @@ -3593,6 +3605,18 @@ s3_Scenario_ConditionalRequests: genai: some snippet_tags: - python.example_code.s3.S3ConditionalRequests.wrapper + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/S3/scenarios/S3ConditionalRequestsScenario + sdkguide: + excerpts: + - description: Run an interactive scenario demonstrating &S3; conditional request features. + snippet_tags: + - S3ConditionalRequests.dotnetv3.Scenario + - description: A wrapper class for S3 functions. + snippet_tags: + - S3ConditionalRequests.dotnetv3.S3ActionsWrapper services: s3: {GetObject, PutObject, CopyObject} s3_Scenario_DownloadS3Directory: diff --git a/dotnetv3/S3/README.md b/dotnetv3/S3/README.md index f3bedcc6b54..9a43fea4de2 100644 --- a/dotnetv3/S3/README.md +++ b/dotnetv3/S3/README.md @@ -84,6 +84,7 @@ functions within the same service. - [Get started with encryption](SSEClientEncryptionExample/SSEClientEncryption.cs) - [Get started with tags](ObjectTagExample/ObjectTag.cs) - [Lock Amazon S3 objects](scenarios/S3ObjectLockScenario/S3ObjectLockWorkflow/S3ObjectLockWorkflow.cs) +- [Make conditional requests](scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs) - [Manage access control lists (ACLs)](ManageACLsExample/ManageACLs.cs) - [Perform a multipart copy](MPUapiCopyObjExample/MPUapiCopyObj.cs) - [Transform data with S3 Object Lambda](../cross-service/S3ObjectLambdaFunction) @@ -209,6 +210,18 @@ This example shows you how to work with S3 object lock features. +#### Make conditional requests + +This example shows you how to add preconditions to Amazon S3 requests. + + + + + + + + + #### Manage access control lists (ACLs) This example shows you how to manage access control lists (ACLs) for Amazon S3 buckets. @@ -283,4 +296,4 @@ in the `dotnetv3` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/Enums.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/Enums.cs new file mode 100644 index 00000000000..2832bbe46db --- /dev/null +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/Enums.cs @@ -0,0 +1,12 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +namespace S3ConditionalRequestsScenario; + +public enum S3ConditionType +{ + IfMatch, + IfNoneMatch, + IfModifiedSince, + IfUnmodifiedSince +} \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs index c875a92c353..aa62c496e49 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ActionsWrapper.cs @@ -1,14 +1,14 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -// snippet-start:[S3LockWorkflow.dotnetv3.S3ActionsWrapper] +// snippet-start:[S3ConditionalRequests.dotnetv3.S3ActionsWrapper] using System.Net; using Amazon.S3; using Amazon.S3.Model; -using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; -namespace S3ObjectLockScenario; +namespace S3ConditionalRequestsScenario; /// /// Encapsulate the Amazon S3 operations. @@ -16,385 +16,321 @@ namespace S3ObjectLockScenario; public class S3ActionsWrapper { private readonly IAmazonS3 _amazonS3; + private readonly ILogger _logger; /// /// Constructor for the S3ActionsWrapper. /// /// The injected S3 client. - public S3ActionsWrapper(IAmazonS3 amazonS3, IConfiguration configuration) + /// The class logger. + public S3ActionsWrapper(IAmazonS3 amazonS3, ILogger logger) { _amazonS3 = amazonS3; + _logger = logger; } - // snippet-start:[S3LockWorkflow.dotnetv3.CreateBucketWithObjectLock] + // snippet-start:[S3ConditionalRequests.dotnetv3.GetObjectConditional] /// - /// Create a new Amazon S3 bucket with object lock actions. + /// Retrieves an object from Amazon S3 with a conditional request. /// - /// The name of the bucket to create. - /// True to enable object lock on the bucket. - /// True if successful. - public async Task CreateBucketWithObjectLock(string bucketName, bool enableObjectLock) + /// The key of the object to retrieve. + /// The source bucket of the object. + /// The type of condition: 'IfMatch', 'IfNoneMatch', 'IfModifiedSince', 'IfUnmodifiedSince'. + /// The value to use for the condition for dates. + /// The value to use for the condition for etags. + /// True if the conditional read is successful, False otherwise. + public async Task GetObjectConditional(string objectKey, string sourceBucket, + S3ConditionType conditionType, DateTime? conditionDateValue = null, string? etagConditionalValue = null) { - Console.WriteLine($"\tCreating bucket {bucketName} with object lock {enableObjectLock}."); try { - var request = new PutBucketRequest + var getObjectRequest = new GetObjectRequest { - BucketName = bucketName, - UseClientRegion = true, - ObjectLockEnabledForBucket = enableObjectLock, + BucketName = sourceBucket, + Key = objectKey }; - var response = await _amazonS3.PutBucketAsync(request); + switch (conditionType) + { + case S3ConditionType.IfMatch: + getObjectRequest.EtagToMatch = etagConditionalValue; + break; + case S3ConditionType.IfNoneMatch: + getObjectRequest.EtagToNotMatch = etagConditionalValue; + break; + case S3ConditionType.IfModifiedSince: + getObjectRequest.ModifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); + break; + case S3ConditionType.IfUnmodifiedSince: + getObjectRequest.UnmodifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); + break; + default: + throw new ArgumentOutOfRangeException(nameof(conditionType), conditionType, null); + } - return response.HttpStatusCode == System.Net.HttpStatusCode.OK; - } - catch (AmazonS3Exception ex) - { - Console.WriteLine($"Error creating bucket: '{ex.Message}'"); - return false; + var response = await _amazonS3.GetObjectAsync(getObjectRequest); + var sampleBytes = new byte[20]; + await response.ResponseStream.ReadAsync(sampleBytes, 0, 20); + _logger.LogInformation($"Conditional read successful. 
Here are the first 20 bytes of the object:\n{System.Text.Encoding.UTF8.GetString(sampleBytes)}"); + return true; } - } - // snippet-end:[S3LockWorkflow.dotnetv3.CreateBucketWithObjectLock] - - // snippet-start:[S3LockWorkflow.dotnetv3.EnableObjectLockOnBucket] - /// - /// Enable object lock on an existing bucket. - /// - /// The name of the bucket to modify. - /// True if successful. - public async Task EnableObjectLockOnBucket(string bucketName) - { - try + catch (AmazonS3Exception e) { - // First, enable Versioning on the bucket. - await _amazonS3.PutBucketVersioningAsync(new PutBucketVersioningRequest() + if (e.ErrorCode == "PreconditionFailed") { - BucketName = bucketName, - VersioningConfig = new S3BucketVersioningConfig() - { - EnableMfaDelete = false, - Status = VersionStatus.Enabled - } - }); - - var request = new PutObjectLockConfigurationRequest() + _logger.LogError("Conditional read failed: Precondition failed"); + } + else if (e.ErrorCode == "NotModified") { - BucketName = bucketName, - ObjectLockConfiguration = new ObjectLockConfiguration() - { - ObjectLockEnabled = new ObjectLockEnabled("Enabled"), - }, - }; - - var response = await _amazonS3.PutObjectLockConfigurationAsync(request); - Console.WriteLine($"\tAdded an object lock policy to bucket {bucketName}."); - return response.HttpStatusCode == System.Net.HttpStatusCode.OK; - } - catch (AmazonS3Exception ex) - { - Console.WriteLine($"Error modifying object lock: '{ex.Message}'"); + _logger.LogError("Conditional read failed: Object not modified"); + } + else + { + _logger.LogError($"Unexpected error: {e.ErrorCode}"); + throw; + } return false; } } - // snippet-end:[S3LockWorkflow.dotnetv3.EnableObjectLockOnBucket] + // snippet-end:[S3ConditionalRequests.dotnetv3.GetObjectConditional] - // snippet-start:[S3LockWorkflow.dotnetv3.ModifyObjectRetentionPeriod] + // snippet-start:[S3ConditionalRequests.dotnetv3.PutObjectConditional] /// - /// Set or modify a retention period on an object in an S3 bucket. + /// Uploads an object to Amazon S3 with a conditional request. Prevents overwrite using an IfNoneMatch condition for the object key. /// - /// The bucket of the object. - /// The key of the object. - /// The retention mode. - /// The date retention expires. - /// True if successful. - public async Task ModifyObjectRetentionPeriod(string bucketName, - string objectKey, ObjectLockRetentionMode retention, DateTime retainUntilDate) + /// The key of the object to upload. + /// The source bucket of the object. + /// The content to upload as a string. + /// The ETag if the conditional write is successful, empty otherwise. 
+ public async Task PutObjectConditional(string objectKey, string bucket, string content) { try { - var request = new PutObjectRetentionRequest() + var putObjectRequest = new PutObjectRequest { - BucketName = bucketName, + BucketName = bucket, Key = objectKey, - Retention = new ObjectLockRetention() - { - Mode = retention, - RetainUntilDate = retainUntilDate - } + ContentBody = content, + IfNoneMatch = "*" }; - var response = await _amazonS3.PutObjectRetentionAsync(request); - Console.WriteLine($"\tSet retention for {objectKey} in {bucketName} until {retainUntilDate:d}."); - return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + var putResult = await _amazonS3.PutObjectAsync(putObjectRequest); + _logger.LogInformation($"Conditional write successful for key {objectKey} in bucket {bucket}."); + return putResult.ETag; } - catch (AmazonS3Exception ex) + catch (AmazonS3Exception e) { - Console.WriteLine($"\tError modifying retention period: '{ex.Message}'"); - return false; + if (e.ErrorCode == "PreconditionFailed") + { + _logger.LogError("Conditional write failed: Precondition failed"); + } + else + { + _logger.LogError($"Unexpected error: {e.ErrorCode}"); + throw; + } + return string.Empty; } } - // snippet-end:[S3LockWorkflow.dotnetv3.ModifyObjectRetentionPeriod] + // snippet-end:[S3ConditionalRequests.dotnetv3.PutObjectConditional] - // snippet-start:[S3LockWorkflow.dotnetv3.ModifyBucketDefaultRetention] + // snippet-start:[S3ConditionalRequests.dotnetv3.CopyObjectConditional] /// - /// Set or modify a retention period on an S3 bucket. + /// Copies an object from one Amazon S3 bucket to another with a conditional request. /// - /// The bucket to modify. - /// The retention mode. - /// The date for retention until. - /// True if successful. - public async Task ModifyBucketDefaultRetention(string bucketName, bool enableObjectLock, ObjectLockRetentionMode retention, DateTime retainUntilDate) + /// The key of the source object to copy. + /// The key of the destination object. + /// The source bucket of the object. + /// The destination bucket of the object. + /// The type of condition to apply, e.g. 'CopySourceIfMatch', 'CopySourceIfNoneMatch', 'CopySourceIfModifiedSince', 'CopySourceIfUnmodifiedSince'. + /// The value to use for the condition for dates. + /// The value to use for the condition for etags. + /// True if the conditional copy is successful, False otherwise. + public async Task CopyObjectConditional(string sourceKey, string destKey, string sourceBucket, string destBucket, + S3ConditionType conditionType, DateTime? conditionDateValue = null, string? etagConditionalValue = null) { - var enabledString = enableObjectLock ? "Enabled" : "Disabled"; - var timeDifference = retainUntilDate.Subtract(DateTime.Now); try { - // First, enable Versioning on the bucket. 
- await _amazonS3.PutBucketVersioningAsync(new PutBucketVersioningRequest() + var copyObjectRequest = new CopyObjectRequest { - BucketName = bucketName, - VersioningConfig = new S3BucketVersioningConfig() - { - EnableMfaDelete = false, - Status = VersionStatus.Enabled - } - }); + DestinationBucket = destBucket, + DestinationKey = destKey, + SourceBucket = sourceBucket, + SourceKey = sourceKey + }; - var request = new PutObjectLockConfigurationRequest() + switch (conditionType) { - BucketName = bucketName, - ObjectLockConfiguration = new ObjectLockConfiguration() - { - ObjectLockEnabled = new ObjectLockEnabled(enabledString), - Rule = new ObjectLockRule() - { - DefaultRetention = new DefaultRetention() - { - Mode = retention, - Days = timeDifference.Days // Can be specified in days or years but not both. - } - } - } - }; + case S3ConditionType.IfMatch: + copyObjectRequest.ETagToMatch = etagConditionalValue; + break; + case S3ConditionType.IfNoneMatch: + copyObjectRequest.ETagToNotMatch = etagConditionalValue; + break; + case S3ConditionType.IfModifiedSince: + copyObjectRequest.ModifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); + break; + case S3ConditionType.IfUnmodifiedSince: + copyObjectRequest.UnmodifiedSinceDateUtc = conditionDateValue.GetValueOrDefault(); + break; + default: + throw new ArgumentOutOfRangeException(nameof(conditionType), conditionType, null); + } - var response = await _amazonS3.PutObjectLockConfigurationAsync(request); - Console.WriteLine($"\tAdded a default retention to bucket {bucketName}."); - return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + await _amazonS3.CopyObjectAsync(copyObjectRequest); + _logger.LogInformation($"Conditional copy successful for key {destKey} in bucket {destBucket}."); + return true; } - catch (AmazonS3Exception ex) + catch (AmazonS3Exception e) { - Console.WriteLine($"\tError modifying object lock: '{ex.Message}'"); + if (e.ErrorCode == "PreconditionFailed") + { + _logger.LogError("Conditional copy failed: Precondition failed"); + } + else if (e.ErrorCode == "304") + { + _logger.LogError("Conditional copy failed: Object not modified"); + } + else + { + _logger.LogError($"Unexpected error: {e.ErrorCode}"); + throw; + } return false; } } - // snippet-end:[S3LockWorkflow.dotnetv3.ModifyBucketDefaultRetention] + // snippet-end:[S3ConditionalRequests.dotnetv3.CopyObjectConditional] - // snippet-start:[S3LockWorkflow.dotnetv3.GetObjectRetention] /// - /// Get the retention period for an S3 object. + /// Create a new Amazon S3 bucket with a specified name and check that the bucket is ready. /// - /// The bucket of the object. - /// The object key. - /// The object retention details. - public async Task GetObjectRetention(string bucketName, - string objectKey) + /// The name of the bucket to create. + /// True if successful. 
+ public async Task CreateBucketWithName(string bucketName) { + Console.WriteLine($"\tCreating bucket {bucketName}."); try { - var request = new GetObjectRetentionRequest() + var request = new PutBucketRequest { BucketName = bucketName, - Key = objectKey + UseClientRegion = true }; - var response = await _amazonS3.GetObjectRetentionAsync(request); - Console.WriteLine($"\tObject retention for {objectKey} in {bucketName}: " + - $"\n\t{response.Retention.Mode} until {response.Retention.RetainUntilDate:d}."); - return response.Retention; - } - catch (AmazonS3Exception ex) - { - Console.WriteLine($"\tUnable to fetch object lock retention: '{ex.Message}'"); - return new ObjectLockRetention(); - } - } - // snippet-end:[S3LockWorkflow.dotnetv3.GetObjectRetention] - - // snippet-start:[S3LockWorkflow.dotnetv3.ModifyObjectLegalHold] - /// - /// Set or modify a legal hold on an object in an S3 bucket. - /// - /// The bucket of the object. - /// The key of the object. - /// The On or Off status for the legal hold. - /// True if successful. - public async Task ModifyObjectLegalHold(string bucketName, - string objectKey, ObjectLockLegalHoldStatus holdStatus) - { - try - { - var request = new PutObjectLegalHoldRequest() + await _amazonS3.PutBucketAsync(request); + var bucketReady = false; + var retries = 5; + while (!bucketReady && retries > 0) { - BucketName = bucketName, - Key = objectKey, - LegalHold = new ObjectLockLegalHold() - { - Status = holdStatus - } - }; + Thread.Sleep(5000); + bucketReady = await Amazon.S3.Util.AmazonS3Util.DoesS3BucketExistV2Async(_amazonS3, bucketName); + retries--; + } - var response = await _amazonS3.PutObjectLegalHoldAsync(request); - Console.WriteLine($"\tModified legal hold for {objectKey} in {bucketName}."); - return response.HttpStatusCode == System.Net.HttpStatusCode.OK; + return bucketReady; + } + catch (BucketAlreadyExistsException ex) + { + Console.WriteLine($"Bucket already exists: '{ex.Message}'"); + return true; } catch (AmazonS3Exception ex) { - Console.WriteLine($"\tError modifying legal hold: '{ex.Message}'"); + Console.WriteLine($"Error creating bucket: '{ex.Message}'"); return false; } } - // snippet-end:[S3LockWorkflow.dotnetv3.ModifyObjectLegalHold] - // snippet-start:[S3LockWorkflow.dotnetv3.GetObjectLegalHold] /// - /// Get the legal hold details for an S3 object. + /// Cleans up objects and deletes the bucket by name. /// - /// The bucket of the object. - /// The object key. - /// The object legal hold details. - public async Task GetObjectLegalHold(string bucketName, - string objectKey) + /// The name of the bucket. + /// Async task. 
+ public async Task CleanupBucketByName(string bucketName) { try { - var request = new GetObjectLegalHoldRequest() + var listObjectsResponse = await _amazonS3.ListObjectsV2Async(new ListObjectsV2Request { BucketName = bucketName }); + foreach (var obj in listObjectsResponse.S3Objects) { - BucketName = bucketName, - Key = objectKey - }; - - var response = await _amazonS3.GetObjectLegalHoldAsync(request); - Console.WriteLine($"\tObject legal hold for {objectKey} in {bucketName}: " + - $"\n\tStatus: {response.LegalHold.Status}"); - return response.LegalHold; + await _amazonS3.DeleteObjectAsync(new DeleteObjectRequest { BucketName = bucketName, Key = obj.Key }); + } + await _amazonS3.DeleteBucketAsync(new DeleteBucketRequest { BucketName = bucketName }); + Console.WriteLine($"Cleaned up bucket: {bucketName}."); } - catch (AmazonS3Exception ex) + catch (AmazonS3Exception e) { - Console.WriteLine($"\tUnable to fetch legal hold: '{ex.Message}'"); - return new ObjectLockLegalHold(); + if (e.ErrorCode == "NoSuchBucket") + { + Console.WriteLine($"Bucket {bucketName} does not exist, skipping cleanup."); + } + else + { + Console.WriteLine($"Error deleting bucket: {e.ErrorCode}"); + throw; + } } } - // snippet-end:[S3LockWorkflow.dotnetv3.GetObjectLegalHold] - // snippet-start:[S3LockWorkflow.dotnetv3.GetBucketObjectLockConfiguration] /// - /// Get the object lock configuration details for an S3 bucket. + /// List the contents of the bucket with their ETag. /// - /// The bucket to get details. - /// The bucket's object lock configuration details. - public async Task GetBucketObjectLockConfiguration(string bucketName) + /// The name of the bucket. + /// Async task. + public async Task> ListBucketContentsByName(string bucketName) { + var results = new List(); try { - var request = new GetObjectLockConfigurationRequest() + Console.WriteLine($"\t Items in bucket {bucketName}"); + var listObjectsResponse = await _amazonS3.ListObjectsV2Async(new ListObjectsV2Request { BucketName = bucketName }); + if (listObjectsResponse.S3Objects.Count == 0) { - BucketName = bucketName - }; - - var response = await _amazonS3.GetObjectLockConfigurationAsync(request); - Console.WriteLine($"\tBucket object lock config for {bucketName} in {bucketName}: " + - $"\n\tEnabled: {response.ObjectLockConfiguration.ObjectLockEnabled}" + - $"\n\tRule: {response.ObjectLockConfiguration.Rule?.DefaultRetention}"); - - return response.ObjectLockConfiguration; - } - catch (AmazonS3Exception ex) - { - Console.WriteLine($"\tUnable to fetch object lock config: '{ex.Message}'"); - return new ObjectLockConfiguration(); - } - } - // snippet-end:[S3LockWorkflow.dotnetv3.GetBucketObjectLockConfiguration] - - // snippet-start:[S3LockWorkflow.dotnetv3.UploadFileAsync] - /// - /// Upload a file from the local computer to an Amazon S3 bucket. - /// - /// The Amazon S3 bucket to use. - /// The object to upload. - /// The path, including file name, of the object to upload. - /// True if success. 
- public async Task UploadFileAsync(string bucketName, string objectName, string filePath) - { - var request = new PutObjectRequest - { - BucketName = bucketName, - Key = objectName, - FilePath = filePath, - ChecksumAlgorithm = ChecksumAlgorithm.SHA256 - }; + Console.WriteLine("\t\tNo objects found."); + } + else + { + foreach (var obj in listObjectsResponse.S3Objects) + { + Console.WriteLine($"\t\t object: {obj.Key} ETag {obj.ETag}"); + } + } + results = listObjectsResponse.S3Objects; - var response = await _amazonS3.PutObjectAsync(request); - if (response.HttpStatusCode == System.Net.HttpStatusCode.OK) - { - Console.WriteLine($"\tSuccessfully uploaded {objectName} to {bucketName}."); - return true; } - else + catch (AmazonS3Exception e) { - Console.WriteLine($"\tCould not upload {objectName} to {bucketName}."); - return false; + if (e.ErrorCode == "NoSuchBucket") + { + _logger.LogError($"Bucket {bucketName} does not exist."); + } + else + { + _logger.LogError($"Error listing bucket and objects: {e.ErrorCode}"); + throw; + } } - } - // snippet-end:[S3LockWorkflow.dotnetv3.UploadFileAsync] - // snippet-start:[S3LockWorkflow.dotnetv3.ListBucketObjectsAndVersions] - /// - /// List bucket objects and versions. - /// - /// The Amazon S3 bucket to use. - /// The list of objects and versions. - public async Task ListBucketObjectsAndVersions(string bucketName) - { - var request = new ListVersionsRequest() - { - BucketName = bucketName - }; - - var response = await _amazonS3.ListVersionsAsync(request); - return response; + return results; } - // snippet-end:[S3LockWorkflow.dotnetv3.ListBucketObjectsAndVersions] - // snippet-start:[S3LockWorkflow.dotnetv3.DeleteObjectFromBucket] /// /// Delete an object from a specific bucket. /// /// The Amazon S3 bucket to use. /// The key of the object to delete. - /// True if the object has retention settings. - /// Optional versionId. /// True if successful. - public async Task DeleteObjectFromBucket(string bucketName, string objectKey, bool hasRetention, string? versionId = null) + public async Task DeleteObjectFromBucket(string bucketName, string objectKey) { try { var request = new DeleteObjectRequest() { BucketName = bucketName, - Key = objectKey, - VersionId = versionId, + Key = objectKey }; - if (hasRetention) - { - // Set the BypassGovernanceRetention header - // if the file has retention settings. - request.BypassGovernanceRetention = true; - } await _amazonS3.DeleteObjectAsync(request); - Console.WriteLine( - $"Deleted {objectKey} in {bucketName}."); + Console.WriteLine($"Deleted {objectKey} in {bucketName}."); return true; } catch (AmazonS3Exception ex) @@ -403,20 +339,25 @@ public async Task DeleteObjectFromBucket(string bucketName, string objectK return false; } } - // snippet-end:[S3LockWorkflow.dotnetv3.DeleteObjectFromBucket] - // snippet-start:[S3LockWorkflow.dotnetv3.DeleteBucketByName] /// - /// Delete a specific bucket. + /// Delete a specific bucket by deleting the objects and then the bucket itself. /// /// The Amazon S3 bucket to use. /// The key of the object to delete. /// Optional versionId. /// True if successful. 
- public async Task DeleteBucketByName(string bucketName) + public async Task CleanUpBucketByName(string bucketName) { try { + var allFiles = await ListBucketContentsByName(bucketName); + + foreach (var fileInfo in allFiles) + { + await DeleteObjectFromBucket(fileInfo.BucketName, fileInfo.Key); + } + var request = new DeleteBucketRequest() { BucketName = bucketName, }; var response = await _amazonS3.DeleteBucketAsync(request); Console.WriteLine($"\tDelete for {bucketName} complete."); @@ -429,7 +370,6 @@ public async Task DeleteBucketByName(string bucketName) } } - // snippet-end:[S3LockWorkflow.dotnetv3.DeleteBucketByName] } -// snippet-end:[S3LockWorkflow.dotnetv3.S3ActionsWrapper] \ No newline at end of file +// snippet-end:[S3ConditionalRequests.dotnetv3.S3ActionsWrapper] \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs index f401d091e66..1d7a0fd8017 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs @@ -1,10 +1,9 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -// snippet-start:[S3LockWorkflow.dotnetv3.ObjectLockWorkflow] +// snippet-start:[S3ConditionalRequests.dotnetv3.Scenario] using Amazon.S3; -using Amazon.S3.Model; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; @@ -12,29 +11,29 @@ using Microsoft.Extensions.Logging.Console; using Microsoft.Extensions.Logging.Debug; -namespace S3ObjectLockScenario; +namespace S3ConditionalRequestsScenario; public static class S3ConditionalRequestsScenario { /* Before running this .NET code example, set up your development environment, including your credentials. - This .NET example performs the following tasks: - 1. Create test Amazon Simple Storage Service (S3) buckets with different lock policies. - 2. Upload sample objects to each bucket. - 3. Set some Legal Hold and Retention Periods on objects and buckets. - 4. Investigate lock policies by viewing settings or attempting to delete or overwrite objects. - 5. Clean up objects and buckets. + This example demonstrates the use of conditional requests for S3 operations. + You can use conditional requests to add preconditions to S3 read requests to return or copy + an object based on its Entity tag (ETag), or last modified date. + You can use a conditional write requests to prevent overwrites by ensuring + there is no existing object with the same key. 
*/ public static S3ActionsWrapper _s3ActionsWrapper = null!; public static IConfiguration _configuration = null!; - private static string _resourcePrefix = null!; - private static string noLockBucketName = null!; - private static string lockEnabledBucketName = null!; - private static string retentionAfterCreationBucketName = null!; - private static List bucketNames = new List(); - private static List fileNames = new List(); + public static string _resourcePrefix = null!; + public static string _sourceBucketName = null!; + public static string _destinationBucketName = null!; + public static string _sampleObjectKey = null!; + public static string _sampleObjectEtag = null!; + public static bool _interactive = true; + public static async Task Main(string[] args) { @@ -57,18 +56,17 @@ public static async Task Main(string[] args) true) // Optionally, load local settings. .Build(); - ConfigurationSetup(); - ServicesSetup(host); try { Console.WriteLine(new string('-', 80)); - Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Object Locking Workflow Scenario."); + Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Conditional Requests Feature Scenario."); Console.WriteLine(new string('-', 80)); - await Setup(true); + ConfigurationSetup(); + _sampleObjectEtag = await Setup(_sourceBucketName, _destinationBucketName, _sampleObjectKey); - await DemoActionChoices(); + await DisplayDemoChoices(_sourceBucketName, _destinationBucketName, _sampleObjectKey, _sampleObjectEtag, 0); Console.WriteLine(new string('-', 80)); Console.WriteLine("Cleaning up resources."); @@ -76,16 +74,49 @@ public static async Task Main(string[] args) await Cleanup(true); Console.WriteLine(new string('-', 80)); - Console.WriteLine("Amazon S3 Object Locking Workflow is complete."); + Console.WriteLine("Amazon S3 Conditional Requests Feature Scenario is complete."); Console.WriteLine(new string('-', 80)); } catch (Exception ex) { Console.WriteLine(new string('-', 80)); Console.WriteLine($"There was a problem: {ex.Message}"); + await CleanupScenario(_sourceBucketName, _destinationBucketName); + Console.WriteLine(new string('-', 80)); + } + } + + public static async Task RunScenario() + { + try + { + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Conditional Requests Feature Scenario."); + Console.WriteLine(new string('-', 80)); + ConfigurationSetup(); + _sampleObjectEtag = await Setup(_sourceBucketName, _destinationBucketName, _sampleObjectKey); + + await DisplayDemoChoices(_sourceBucketName, _destinationBucketName, _sampleObjectKey, _sampleObjectEtag, 0); + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Cleaning up resources."); + Console.WriteLine(new string('-', 80)); await Cleanup(true); + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Amazon S3 Conditional Requests Feature Scenario is complete."); Console.WriteLine(new string('-', 80)); } + catch (Exception ex) + { + Console.WriteLine(new string('-', 80)); + Console.WriteLine($"There was a problem: {ex.Message}"); + await CleanupScenario(_sourceBucketName, _destinationBucketName); + Console.WriteLine(new string('-', 80)); + return false; + } + + return true; } /// @@ -104,242 +135,151 @@ public static void ConfigurationSetup() { _resourcePrefix = _configuration["resourcePrefix"] ?? 
"dotnet-example"; - noLockBucketName = _resourcePrefix + "-no-lock"; - lockEnabledBucketName = _resourcePrefix + "-lock-enabled"; - retentionAfterCreationBucketName = _resourcePrefix + "-retention-after-creation"; - - bucketNames.Add(noLockBucketName); - bucketNames.Add(lockEnabledBucketName); - bucketNames.Add(retentionAfterCreationBucketName); + _sourceBucketName = _resourcePrefix + "-source"; + _destinationBucketName = _resourcePrefix + "-dest"; + _sampleObjectKey = _resourcePrefix + "-sample-object.txt"; } - // - /// Deploy necessary resources for the scenario. + /// + /// Sets up the scenario by creating a source and destination bucket, and uploading a test file to the source bucket. /// - /// True to run as interactive. - /// True if successful. - public static async Task Setup(bool interactive) + /// The name of the source bucket. + /// The name of the destination bucket. + /// The name of the test file to add to the source bucket. + /// The ETag of the uploaded test file. + public static async Task Setup(string sourceBucket, string destBucket, string objectKey) { Console.WriteLine( - "\nFor this workflow, we will use the AWS SDK for .NET to create several S3\n" + - "buckets and files to demonstrate working with S3 locking features.\n"); + "\nFor this scenario, we will use the AWS SDK for .NET to create several S3\n" + + "buckets and files to demonstrate working with S3 conditional requests.\n" + + "This example demonstrates the use of conditional requests for S3 operations.\r\n" + + "You can use conditional requests to add preconditions to S3 read requests to return or copy\r\n" + + "an object based on its Entity tag (ETag), or last modified date. \r\n" + + "You can use a conditional write requests to prevent overwrites by ensuring \r\n" + + "there is no existing object with the same key. 
\r\n\r\n" + + "This example will allow you to perform conditional reads\r\n" + + "and writes that will succeed or fail based on your selected options.\r\n\r\n" + + "Sample buckets and a sample object will be created as part of the example."); Console.WriteLine(new string('-', 80)); Console.WriteLine("Press Enter when you are ready to start."); - if (interactive) - Console.ReadLine(); - - Console.WriteLine("\nS3 buckets can be created either with or without object lock enabled."); - await _s3ActionsWrapper.CreateBucketWithObjectLock(noLockBucketName, false); - await _s3ActionsWrapper.CreateBucketWithObjectLock(lockEnabledBucketName, true); - await _s3ActionsWrapper.CreateBucketWithObjectLock(retentionAfterCreationBucketName, false); - - Console.WriteLine("Press Enter to continue."); - if (interactive) + if (_interactive) Console.ReadLine(); - Console.WriteLine("\nA bucket can be configured to use object locking with a default retention period."); - await _s3ActionsWrapper.ModifyBucketDefaultRetention(retentionAfterCreationBucketName, true, - ObjectLockRetentionMode.Governance, DateTime.UtcNow.AddDays(1)); + await _s3ActionsWrapper.CreateBucketWithName(sourceBucket); + await _s3ActionsWrapper.CreateBucketWithName(destBucket); - Console.WriteLine("Press Enter to continue."); - if (interactive) - Console.ReadLine(); - - Console.WriteLine("\nObject lock policies can also be added to existing buckets."); - await _s3ActionsWrapper.EnableObjectLockOnBucket(lockEnabledBucketName); - - Console.WriteLine("Press Enter to continue."); - if (interactive) - Console.ReadLine(); + var eTag = await _s3ActionsWrapper.PutObjectConditional(objectKey, sourceBucket, + "Test file content."); - // Upload some files to the buckets. - Console.WriteLine("\nNow let's add some test files:"); - var fileName = _configuration["exampleFileName"] ?? "exampleFile.txt"; - int fileCount = 2; - // Create the file if it does not already exist. - if (!File.Exists(fileName)) - { - await using StreamWriter sw = File.CreateText(fileName); - await sw.WriteLineAsync( - "This is a sample file for uploading to a bucket."); - } + return eTag; + } - foreach (var bucketName in bucketNames) - { - for (int i = 0; i < fileCount; i++) - { - var numberedFileName = Path.GetFileNameWithoutExtension(fileName) + i + Path.GetExtension(fileName); - fileNames.Add(numberedFileName); - await _s3ActionsWrapper.UploadFileAsync(bucketName, numberedFileName, fileName); - } - } - Console.WriteLine("Press Enter to continue."); - if (interactive) - Console.ReadLine(); + /// + /// Cleans up the scenario by deleting the source and destination buckets. + /// + /// The name of the source bucket. + /// The name of the destination bucket. + public static async Task CleanupScenario(string sourceBucket, string destBucket) + { + await _s3ActionsWrapper.CleanupBucketByName(sourceBucket); + await _s3ActionsWrapper.CleanupBucketByName(destBucket); + } - if (!interactive) - return true; - Console.WriteLine("\nNow we can set some object lock policies on individual files:"); - foreach (var bucketName in bucketNames) - { - for (int i = 0; i < fileNames.Count; i++) - { - // No modifications to the objects in the first bucket. - if (bucketName != bucketNames[0]) - { - var exampleFileName = fileNames[i]; - switch (i) - { - case 0: - { - var question = - $"\nWould you like to add a legal hold to {exampleFileName} in {bucketName}? (y/n)"; - if (GetYesNoResponse(question)) - { - // Set a legal hold. 
- await _s3ActionsWrapper.ModifyObjectLegalHold(bucketName, exampleFileName, ObjectLockLegalHoldStatus.On); - - } - break; - } - case 1: - { - var question = - $"\nWould you like to add a 1 day Governance retention period to {exampleFileName} in {bucketName}? (y/n)" + - "\nReminder: Only a user with the s3:BypassGovernanceRetention permission will be able to delete this file or its bucket until the retention period has expired."; - if (GetYesNoResponse(question)) - { - // Set a Governance mode retention period for 1 day. - await _s3ActionsWrapper.ModifyObjectRetentionPeriod( - bucketName, exampleFileName, - ObjectLockRetentionMode.Governance, - DateTime.UtcNow.AddDays(1)); - } - break; - } - } - } - } - } - Console.WriteLine(new string('-', 80)); - return true; + /// + /// Displays a list of the objects in the test buckets. + /// + /// The name of the source bucket. + /// The name of the destination bucket. + public static async Task DisplayBuckets(string sourceBucket, string destBucket) + { + await _s3ActionsWrapper.ListBucketContentsByName(sourceBucket); + await _s3ActionsWrapper.ListBucketContentsByName(destBucket); } - // - /// List all of the current buckets and objects. + /// + /// Displays the menu of conditional request options for the user. /// - /// True to run as interactive. - /// The list of buckets and objects. - public static async Task> ListBucketsAndObjects(bool interactive) + /// The name of the source bucket. + /// The name of the destination bucket. + /// The key of the test object in the source bucket. + /// The ETag of the test object in the source bucket. + public static async Task DisplayDemoChoices(string sourceBucket, string destBucket, string objectKey, string etag, int defaultChoice) { - var allObjects = new List(); - foreach (var bucketName in bucketNames) + var actions = new[] { - var objectsInBucket = await _s3ActionsWrapper.ListBucketObjectsAndVersions(bucketName); - foreach (var objectKey in objectsInBucket.Versions) - { - allObjects.Add(objectKey); - } - } + "Print a list of bucket items.", + "Perform a conditional read.", + "Perform a conditional copy.", + "Perform a conditional write.", + "Clean up and exit." + }; + + var conditions = new[] + { + "If-Match: using the object's ETag. This condition should succeed.", + "If-None-Match: using the object's ETag. This condition should fail.", + "If-Modified-Since: using yesterday's date. This condition should succeed.", + "If-Unmodified-Since: using yesterday's date. This condition should fail." + }; - if (interactive) + var conditionTypes = new[] { - Console.WriteLine("\nCurrent buckets and objects:\n"); - int i = 0; - foreach (var bucketObject in allObjects) - { - i++; - Console.WriteLine( - $"{i}: {bucketObject.Key} \n\tBucket: {bucketObject.BucketName}\n\tVersion: {bucketObject.VersionId}"); - } - } + S3ConditionType.IfMatch, + S3ConditionType.IfNoneMatch, + S3ConditionType.IfModifiedSince, + S3ConditionType.IfUnmodifiedSince, + }; - return allObjects; - } + var yesterdayDate = DateTime.UtcNow.AddDays(-1); - /// - /// Present the user with the demo action choices. - /// - /// Async task. 
- public static async Task DemoActionChoices() - { - var choices = new string[]{ - "List all files in buckets.", - "Attempt to delete a file.", - "Attempt to delete a file with retention period bypass.", - "Attempt to overwrite a file.", - "View the object and bucket retention settings for a file.", - "View the legal hold settings for a file.", - "Finish the workflow."}; - - var choice = 0; - // Keep asking the user until they choose to move on. - while (choice != 6) + int choice; + while ((choice = GetChoiceResponse("\nExplore the S3 conditional request features by selecting one of the following choices:", actions, defaultChoice)) != 4) { - Console.WriteLine(new string('-', 80)); - choice = GetChoiceResponse( - "\nExplore the S3 locking features by selecting one of the following choices:" - , choices); - Console.WriteLine(new string('-', 80)); switch (choice) { case 0: - { - await ListBucketsAndObjects(true); - break; - } + Console.WriteLine("Listing the objects and buckets."); + await DisplayBuckets(sourceBucket, destBucket); + break; case 1: + int conditionTypeIndex = GetChoiceResponse("Perform a conditional read:", conditions, 1); + if (conditionTypeIndex == 0 || conditionTypeIndex == 1) { - Console.WriteLine("\nEnter the number of the object to delete:"); - var allFiles = await ListBucketsAndObjects(true); - var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); - await _s3ActionsWrapper.DeleteObjectFromBucket(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, false, allFiles[fileChoice].VersionId); - break; - } - case 2: - { - Console.WriteLine("\nEnter the number of the object to delete:"); - var allFiles = await ListBucketsAndObjects(true); - var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); - await _s3ActionsWrapper.DeleteObjectFromBucket(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, true, allFiles[fileChoice].VersionId); - break; + await _s3ActionsWrapper.GetObjectConditional(objectKey, sourceBucket, conditionTypes[conditionTypeIndex], null, _sampleObjectEtag); } - case 3: + else if (conditionTypeIndex == 2 || conditionTypeIndex == 3) { - var allFiles = await ListBucketsAndObjects(true); - Console.WriteLine("\nEnter the number of the object to overwrite:"); - var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); - // Create the file if it does not already exist. 
- if (!File.Exists(allFiles[fileChoice].Key)) - { - await using StreamWriter sw = File.CreateText(allFiles[fileChoice].Key); - await sw.WriteLineAsync( - "This is a sample file for uploading to a bucket."); - } - await _s3ActionsWrapper.UploadFileAsync(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key, allFiles[fileChoice].Key); - break; + await _s3ActionsWrapper.GetObjectConditional(objectKey, sourceBucket, conditionTypes[conditionTypeIndex], yesterdayDate); } - case 4: + break; + case 2: + int copyConditionTypeIndex = GetChoiceResponse("Perform a conditional copy:", conditions, 1); + string destKey = GetStringResponse("Enter an object key:", "sampleObjectKey"); + if (copyConditionTypeIndex == 0 || copyConditionTypeIndex == 1) { - var allFiles = await ListBucketsAndObjects(true); - Console.WriteLine("\nEnter the number of the object and bucket to view:"); - var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); - await _s3ActionsWrapper.GetObjectRetention(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key); - await _s3ActionsWrapper.GetBucketObjectLockConfiguration(allFiles[fileChoice].BucketName); - break; + await _s3ActionsWrapper.CopyObjectConditional(objectKey, destKey, sourceBucket, destBucket, conditionTypes[copyConditionTypeIndex], null, etag); } - case 5: + else if (copyConditionTypeIndex == 2 || copyConditionTypeIndex == 3) { - var allFiles = await ListBucketsAndObjects(true); - Console.WriteLine("\nEnter the number of the object to view:"); - var fileChoice = GetChoiceResponse(null, allFiles.Select(f => f.Key).ToArray()); - await _s3ActionsWrapper.GetObjectLegalHold(allFiles[fileChoice].BucketName, allFiles[fileChoice].Key); - break; + await _s3ActionsWrapper.CopyObjectConditional(objectKey, destKey, sourceBucket, destBucket, conditionTypes[copyConditionTypeIndex], yesterdayDate); } + break; + case 3: + Console.WriteLine("Perform a conditional write using IfNoneMatch condition on the object key."); + Console.WriteLine("If the key is a duplicate, the write will fail."); + string newObjectKey = GetStringResponse("Enter an object key:", "newObjectKey"); + await _s3ActionsWrapper.PutObjectConditional(newObjectKey, sourceBucket, "Conditional write example data."); + break; + } + + if (!_interactive) + { + break; } } - return true; + + Console.WriteLine("Proceeding to cleanup."); } // @@ -353,27 +293,8 @@ public static async Task Cleanup(bool interactive) if (!interactive || GetYesNoResponse("Do you want to clean up all files and buckets? (y/n) ")) { - // Remove all locks and delete all buckets and objects. - var allFiles = await ListBucketsAndObjects(false); - foreach (var fileInfo in allFiles) - { - // Check for a legal hold. - var legalHold = await _s3ActionsWrapper.GetObjectLegalHold(fileInfo.BucketName, fileInfo.Key); - if (legalHold?.Status?.Value == ObjectLockLegalHoldStatus.On) - { - await _s3ActionsWrapper.ModifyObjectLegalHold(fileInfo.BucketName, fileInfo.Key, ObjectLockLegalHoldStatus.Off); - } - - // Check for a retention period. 
- var retention = await _s3ActionsWrapper.GetObjectRetention(fileInfo.BucketName, fileInfo.Key); - var hasRetentionPeriod = retention?.Mode == ObjectLockRetentionMode.Governance && retention.RetainUntilDate > DateTime.UtcNow.Date; - await _s3ActionsWrapper.DeleteObjectFromBucket(fileInfo.BucketName, fileInfo.Key, hasRetentionPeriod, fileInfo.VersionId); - } - - foreach (var bucketName in bucketNames) - { - await _s3ActionsWrapper.DeleteBucketByName(bucketName); - } + await _s3ActionsWrapper.CleanUpBucketByName(_sourceBucketName); + await _s3ActionsWrapper.CleanUpBucketByName(_destinationBucketName); } else @@ -407,7 +328,7 @@ private static bool GetYesNoResponse(string question) /// The question string to print on the console. /// The choices to print on the console. /// The index of the selected choice - private static int GetChoiceResponse(string? question, string[] choices) + private static int GetChoiceResponse(string? question, string[] choices, int defaultChoice) { if (question != null) { @@ -419,6 +340,9 @@ private static int GetChoiceResponse(string? question, string[] choices) } } + if (!_interactive) + return defaultChoice; + var choiceNumber = 0; while (choiceNumber < 1 || choiceNumber > choices.Length) { @@ -428,5 +352,30 @@ private static int GetChoiceResponse(string? question, string[] choices) return choiceNumber - 1; } + + /// + /// Get a string response from the user. + /// + /// The question to print. + /// A default answer to use when not interactive. + /// The string response. + public static string GetStringResponse(string? question, string defaultAnswer) + { + string? answer = ""; + if (_interactive) + { + do + { + Console.WriteLine(question); + answer = Console.ReadLine(); + } while (string.IsNullOrWhiteSpace(answer)); + } + else + { + answer = defaultAnswer; + } + + return answer; + } } -// snippet-end:[S3LockWorkflow.dotnetv3.ObjectLockWorkflow] \ No newline at end of file +// snippet-end:[S3ConditionalRequests.dotnetv3.Scenario] \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj index 4eb4d3dc299..48505357654 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.csproj @@ -2,18 +2,19 @@ Exe - net6.0 + net8.0 enable enable - - - - - - + + + + + + + diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json index 872c2a7f9a7..9dd2c6c96bd 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/settings.json @@ -1,4 +1,3 @@ { - "resourcePrefix": "dotnet-s3-lock-example", - "exampleFileName": "dotnet-example-file.txt" + "resourcePrefix": "dotnet-s3-conditional-requests-example" } diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs index 04a65ae6b34..cd8298e4781 100644 --- 
a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsScenarioTests.cs @@ -3,13 +3,13 @@ using Amazon.S3; using Microsoft.Extensions.Configuration; -using S3ObjectLockScenario; -using Xunit.Extensions.Ordering; +using Microsoft.Extensions.Logging; +using S3ConditionalRequestsScenario; -namespace S3ObjectLockTests; +namespace S3ConditionalRequestsTests; /// -/// Tests for the ObjectLockScenario example. +/// Tests for the Conditional Requests example. /// public class S3ConditionalRequestsScenarioTests { @@ -17,6 +17,7 @@ public class S3ConditionalRequestsScenarioTests private readonly S3ActionsWrapper _s3ActionsWrapper = null!; private readonly string _resourcePrefix; + private readonly ILoggerFactory _loggerFactory; /// /// Constructor for the test class. @@ -30,79 +31,49 @@ public S3ConditionalRequestsScenarioTests() true) // Optionally, load local settings. .Build(); - _resourcePrefix = _configuration["resourcePrefix"] ?? "dotnet-example"; - - _s3ActionsWrapper = new S3ActionsWrapper( - new AmazonS3Client(), - _configuration); - - S3ObjectLockWorkflow._s3ActionsWrapper = _s3ActionsWrapper; - S3ObjectLockWorkflow._configuration = _configuration; - } - - /// - /// Run the setup step of the workflow. Should return successful. - /// - /// Async task. - [Fact] - [Order(1)] - [Trait("Category", "Integration")] - public async Task TestSetup() - { - // Arrange. - S3ObjectLockWorkflow.ConfigurationSetup(); - - // Act. - var success = await S3ObjectLockWorkflow.Setup(false); - - var finished = false; - while (!finished) + _loggerFactory = LoggerFactory.Create(builder => { - // Make sure the buckets are available before moving on. - var created = await S3ObjectLockWorkflow.ListBucketsAndObjects(false); - finished = created.Count > 0; - } + builder.AddConsole(); + }); - // Assert. - Assert.True(success); - } + _resourcePrefix = _configuration["resourcePrefix"] ?? "dotnet-example-test"; - /// - /// Run the list object step of the workflow. Should return successful. - /// - /// Async task. - [Fact] - [Order(2)] - [Trait("Category", "Integration")] - public async Task TestObjects() - { - // Arrange. - S3ObjectLockWorkflow.ConfigurationSetup(); - - // Act. - var objects = await S3ObjectLockWorkflow.ListBucketsAndObjects(false); + _s3ActionsWrapper = new S3ActionsWrapper( + new AmazonS3Client(), new Logger(_loggerFactory)); - // Assert. - Assert.NotEmpty(objects); + S3ConditionalRequestsScenario.S3ConditionalRequestsScenario._s3ActionsWrapper = _s3ActionsWrapper; + S3ConditionalRequestsScenario.S3ConditionalRequestsScenario._configuration = _configuration; } - /// - /// Run the cleanup step of the workflow. Should return successful. + /// Run the setup step of the workflow. Should return successful. /// /// Async task. [Fact] - [Order(3)] [Trait("Category", "Integration")] - public async Task TestCleanup() + public async Task TestScenario() { // Arrange. - S3ObjectLockWorkflow.ConfigurationSetup(); + S3ConditionalRequestsScenario.S3ConditionalRequestsScenario._interactive = false; // Act. 
- var success = await S3ObjectLockWorkflow.Cleanup(false); + S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.ConfigurationSetup(); + var sourceName = S3ConditionalRequestsScenario.S3ConditionalRequestsScenario + ._sourceBucketName; + var destName = S3ConditionalRequestsScenario.S3ConditionalRequestsScenario + ._destinationBucketName; + var objKey = S3ConditionalRequestsScenario.S3ConditionalRequestsScenario + ._sampleObjectKey; + var sampleObjectEtag = await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.Setup(sourceName, destName, objKey); + + // Run all the options of the demo. No exceptions should be thrown. + await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.DisplayDemoChoices(sourceName, destName, objKey, sampleObjectEtag, 1); + await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.DisplayDemoChoices(sourceName, destName, objKey, sampleObjectEtag, 2); + await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.DisplayDemoChoices(sourceName, destName, objKey, sampleObjectEtag, 3); + await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.DisplayDemoChoices(sourceName, destName, objKey, sampleObjectEtag, 4); + await S3ConditionalRequestsScenario.S3ConditionalRequestsScenario.Cleanup(false); // Assert. - Assert.True(success); + Assert.NotNull(sampleObjectEtag); } } \ No newline at end of file diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj index 5cd1fc94523..7e259f4ddf4 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/S3ConditionalRequestsTests.csproj @@ -1,7 +1,7 @@ - net6.0 + net8.0 enable enable @@ -9,15 +9,17 @@ - - - - - + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive all - + runtime; build; native; contentfiles; analyzers; buildtransitive all @@ -34,7 +36,7 @@ - + diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json index 872c2a7f9a7..e5cdbb2ebd2 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequestsTests/testsettings.json @@ -1,4 +1,3 @@ { - "resourcePrefix": "dotnet-s3-lock-example", - "exampleFileName": "dotnet-example-file.txt" + "resourcePrefix": "dotnet-s3-conditional-requests-example-test" } From 013261d20983f0c0e732b8bf96244d766d8e1395 Mon Sep 17 00:00:00 2001 From: Rachel Hagerman <110480692+rlhagerm@users.noreply.github.com> Date: Mon, 23 Dec 2024 13:31:12 -0600 Subject: [PATCH 3/4] Documentation updates. 
---
 .../S3ConditionalRequestsScenario/README.md | 30 ++++++-----------
 workflows/s3_conditional_requests/README.md |  1 +
 2 files changed, 13 insertions(+), 18 deletions(-)

diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md
index ec8f5b0cf00..6de6a77bd9f 100644
--- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md
+++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md
@@ -1,20 +1,10 @@
-# Amazon S3 Object Lock Workflow
+# Amazon S3 Conditional Requests Feature Scenario for the SDK for .NET
 
 ## Overview
 
-This example shows how to use AWS SDKs to work with Amazon Simple Storage Service (Amazon S3) object locking features. The workflow demonstrates how to create, update, view, and modify object locks, as well as how locked objects behave regarding requests to delete and overwrite.
+This example demonstrates how to use the AWS SDK for .NET to work with Amazon Simple Storage Service (Amazon S3) conditional request features. The scenario demonstrates how to add preconditions to S3 operations, and how those operations succeed or fail based on those conditions.
 
-[Amazon S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) can help prevent Amazon S3 objects from being deleted or overwritten for a fixed amount of time or indefinitely. Object Lock can help meet regulatory requirements or protect against object changes or deletion.
-
-![Object Lock Features](../../../../workflows/s3_object_lock/resources/Diagram_Amazon-S3-Object-Lock.png)
-
-This workflow demonstrates the following steps and tasks:
-1. Add object lock settings to both new and existing S3 buckets.
- 1. Add objects to buckets with optional object lock or retention period settings.
-2. Attempt to delete or overwrite locked objects.
-3. Retrieve and view the object lock and retention period settings of buckets and objects.
-4. Delete the objects and buckets.
- 1. Remove any object locks and use the BypassGovernanceRetention setting.
+[Amazon S3 Conditional Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/conditional-requests.html) are used to add preconditions to S3 read, copy, or write requests.
 
 ## ⚠ Important
 
@@ -25,15 +15,19 @@ This workflow demonstrates the following steps and tasks:
 
 ## Scenario
 
+This example uses a feature scenario to demonstrate various aspects of S3 conditional requests. The scenario is divided into three stages:
+
+1. **Setup**: Create test buckets and objects.
+2. **Conditional Reads and Writes**: Explore S3 conditional requests by listing objects, attempting to read or write with conditional requests, and viewing request results.
+3. **Clean**: Delete all objects and buckets.
+
 ### Prerequisites
 
 For general prerequisites, see the [README](../../../README.md) in the `dotnetv3` folder.
 
 ### Resources
 
-The workflow scenario steps create the buckets and objects needed for the example. No additional resources are required.
-
-This workflow includes an optional step to add a governance mode retention period of one day to objects in an S3 bucket. In order to delete these objects, you must have the `s3:BypassGovernanceRetention` permission. If you do not have this permission, you will be unable to delete these objects until the retention period has expired.
+The scenario steps create the buckets and objects needed for the example. No additional resources are required.
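As a quick illustration of the conditional write this scenario exercises, the sketch below sends a `PutObject` request with `If-None-Match: *` so the upload succeeds only when no object with that key already exists. It mirrors the `PutObjectConditional` wrapper added in this patch; the client setup and the bucket and object names are placeholders, not values from the changeset.

```csharp
// A minimal sketch of an S3 conditional write, assuming default AWS credentials
// and the AWSSDK.S3 package. Bucket and key names are placeholders; the
// IfNoneMatch property and the "PreconditionFailed" error code mirror the
// PutObjectConditional wrapper in this patch.
using Amazon.S3;
using Amazon.S3.Model;

var s3Client = new AmazonS3Client();

var putRequest = new PutObjectRequest
{
    BucketName = "amzn-s3-demo-bucket",   // placeholder bucket name
    Key = "sample-object.txt",            // placeholder object key
    ContentBody = "Test file content.",
    IfNoneMatch = "*"                     // write only if no object with this key exists
};

try
{
    var response = await s3Client.PutObjectAsync(putRequest);
    Console.WriteLine($"Conditional write succeeded. ETag: {response.ETag}");
}
catch (AmazonS3Exception e) when (e.ErrorCode == "PreconditionFailed")
{
    // An object with this key already exists, so S3 rejected the write.
    Console.WriteLine("Conditional write failed: the key already exists.");
}
```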
### Instructions @@ -46,11 +40,11 @@ dotnet run Alternatively, you can run the example from within your IDE. -This starts an interactive scenario that walks you through creating, exploring, and deleting S3 buckets and objects with various object lock settings. +This starts an interactive scenario that walks you through exploring conditional requests for read, write, and copy operations. ## Additional resources -- [S3 Object Lock](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lock.html) +- [Amazon S3 Developer Guide](https://docs.aws.amazon.com/AmazonS3/latest/userguide/conditional-requests.html) --- diff --git a/workflows/s3_conditional_requests/README.md b/workflows/s3_conditional_requests/README.md index 7eb52dcbe57..8fef0170182 100644 --- a/workflows/s3_conditional_requests/README.md +++ b/workflows/s3_conditional_requests/README.md @@ -21,6 +21,7 @@ The scenario steps create the buckets and objects needed for the example. No add This example is implemented in the following languages: - [Python](../../python/example_code/s3/scenarios/conditional_requests/README.md) +- [.NET](../../dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/README.md) ## Additional reading From c98178f067e146a9f9efde5389154e388c0fa835 Mon Sep 17 00:00:00 2001 From: Rachel Hagerman <110480692+rlhagerm@users.noreply.github.com> Date: Mon, 23 Dec 2024 13:32:48 -0600 Subject: [PATCH 4/4] Removing unused methods. --- .../S3ConditionalRequestsScenario.cs | 33 ------------------- 1 file changed, 33 deletions(-) diff --git a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs index 1d7a0fd8017..eff162f721f 100644 --- a/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs +++ b/dotnetv3/S3/scenarios/S3ConditionalRequestsScenario/S3ConditionalRequests/S3ConditionalRequestsScenario.cs @@ -86,39 +86,6 @@ public static async Task Main(string[] args) } } - public static async Task RunScenario() - { - try - { - Console.WriteLine(new string('-', 80)); - Console.WriteLine("Welcome to the Amazon Simple Storage Service (S3) Conditional Requests Feature Scenario."); - Console.WriteLine(new string('-', 80)); - ConfigurationSetup(); - _sampleObjectEtag = await Setup(_sourceBucketName, _destinationBucketName, _sampleObjectKey); - - await DisplayDemoChoices(_sourceBucketName, _destinationBucketName, _sampleObjectKey, _sampleObjectEtag, 0); - - Console.WriteLine(new string('-', 80)); - Console.WriteLine("Cleaning up resources."); - Console.WriteLine(new string('-', 80)); - await Cleanup(true); - - Console.WriteLine(new string('-', 80)); - Console.WriteLine("Amazon S3 Conditional Requests Feature Scenario is complete."); - Console.WriteLine(new string('-', 80)); - } - catch (Exception ex) - { - Console.WriteLine(new string('-', 80)); - Console.WriteLine($"There was a problem: {ex.Message}"); - await CleanupScenario(_sourceBucketName, _destinationBucketName); - Console.WriteLine(new string('-', 80)); - return false; - } - - return true; - } - /// /// Populate the services for use within the console application. ///
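A companion sketch (not part of the changeset above) of the If-Match conditional copy path the scenario exercises: it reuses an ETag captured from an earlier write and asks Amazon S3 to copy the object only if the source still matches that ETag. The bucket names, keys, and ETag value are placeholders; the `CopyObjectRequest` properties are the same ones used by the `CopyObjectConditional` wrapper in this patch.

```csharp
// A minimal sketch of an If-Match conditional copy, assuming default AWS credentials
// and the AWSSDK.S3 package. Bucket names, keys, and the ETag value are placeholders;
// in the scenario the ETag comes from the earlier conditional write.
using Amazon.S3;
using Amazon.S3.Model;

var s3Client = new AmazonS3Client();
var sourceEtag = "\"0123456789abcdef0123456789abcdef\"";   // placeholder ETag

var copyRequest = new CopyObjectRequest
{
    SourceBucket = "amzn-s3-demo-source-bucket",       // placeholder
    SourceKey = "sample-object.txt",                    // placeholder
    DestinationBucket = "amzn-s3-demo-dest-bucket",     // placeholder
    DestinationKey = "sample-object-copy.txt",          // placeholder
    ETagToMatch = sourceEtag                            // copy only if the source ETag still matches
};

try
{
    await s3Client.CopyObjectAsync(copyRequest);
    Console.WriteLine("Conditional copy succeeded.");
}
catch (AmazonS3Exception e) when (e.ErrorCode == "PreconditionFailed")
{
    // The source object changed since the ETag was captured, so S3 rejected the copy.
    Console.WriteLine("Conditional copy failed: precondition failed.");
}
```

The inverse conditions behave as the scenario's menu describes: an If-None-Match read or copy that supplies the object's current ETag fails (the wrapper surfaces this as error code 304, "Object not modified"), while If-Modified-Since with yesterday's date succeeds for a freshly written object.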